Oct 07 14:48:41 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 07 14:48:41 crc restorecon[4671]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 07 14:48:41 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 14:48:42 crc restorecon[4671]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc 
restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 14:48:42 crc 
restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 
crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 
14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 14:48:42 crc restorecon[4671]: 
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 14:48:42 crc restorecon[4671]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 07 14:48:43 crc kubenswrapper[4672]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 14:48:43 crc kubenswrapper[4672]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 07 14:48:43 crc kubenswrapper[4672]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 14:48:43 crc kubenswrapper[4672]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.443209 4672 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447704 4672 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447729 4672 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447738 4672 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447745 4672 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447751 4672 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447758 4672 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447768 4672 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447777 4672 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447784 4672 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447791 4672 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447798 4672 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447805 4672 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447812 4672 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447818 4672 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447825 4672 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447831 4672 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447837 4672 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447856 4672 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447862 4672 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447867 4672 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447872 4672 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447877 4672 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447882 4672 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447888 4672 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447893 4672 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447898 4672 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447903 4672 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447908 4672 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447914 4672 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447919 4672 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447925 4672 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447930 4672 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447937 4672 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447944 4672 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447949 4672 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447958 4672 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447964 4672 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447970 4672 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447976 4672 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447982 4672 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447987 4672 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447992 4672 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.447997 4672 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448002 4672 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448008 4672 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448032 4672 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448038 4672 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448043 4672 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448048 4672 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448054 4672 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448059 4672 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448064 4672 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448071 4672 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448077 4672 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448084 4672 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448090 4672 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448096 4672 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448101 4672 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448107 4672 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448112 4672 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448120 4672 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448125 4672 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448131 4672 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448136 4672 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448141 4672 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448147 4672 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448152 4672 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448158 4672 feature_gate.go:330] unrecognized feature gate: Example
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448163 4672 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448168 4672 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.448174 4672 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449441 4672 flags.go:64] FLAG: --address="0.0.0.0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449464 4672 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449481 4672 flags.go:64] FLAG: --anonymous-auth="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449491 4672 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449501 4672 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449509 4672 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449519 4672 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449527 4672 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449535 4672 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449543 4672 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449551 4672 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449560 4672 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449567 4672 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449575 4672 flags.go:64] FLAG: --cgroup-root=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449582 4672 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449589 4672 flags.go:64] FLAG: --client-ca-file=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449596 4672 flags.go:64] FLAG: --cloud-config=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449603 4672 flags.go:64] FLAG: --cloud-provider=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449610 4672 flags.go:64] FLAG: --cluster-dns="[]"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449618 4672 flags.go:64] FLAG: --cluster-domain=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449625 4672 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449632 4672 flags.go:64] FLAG: --config-dir=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449639 4672 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449662 4672 flags.go:64] FLAG: --container-log-max-files="5"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449671 4672 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449677 4672 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449684 4672 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449691 4672 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449698 4672 flags.go:64] FLAG: --contention-profiling="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449704 4672 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449711 4672 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449718 4672 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449727 4672 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449736 4672 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449743 4672 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449750 4672 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449756 4672 flags.go:64] FLAG: --enable-load-reader="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449763 4672 flags.go:64] FLAG: --enable-server="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449770 4672 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449781 4672 flags.go:64] FLAG: --event-burst="100"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449789 4672 flags.go:64] FLAG: --event-qps="50"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449797 4672 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449805 4672 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449812 4672 flags.go:64] FLAG: --eviction-hard=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449824 4672 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449832 4672 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449840 4672 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449848 4672 flags.go:64] FLAG: --eviction-soft=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449855 4672 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449862 4672 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449870 4672 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449878 4672 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449885 4672 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449892 4672 flags.go:64] FLAG: --fail-swap-on="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449898 4672 flags.go:64] FLAG: --feature-gates=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449905 4672 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449912 4672 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449920 4672 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449927 4672 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449933 4672 flags.go:64] FLAG: --healthz-port="10248"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449939 4672 flags.go:64] FLAG: --help="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449946 4672 flags.go:64] FLAG: --hostname-override=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449952 4672 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449958 4672 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449964 4672 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449970 4672 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449976 4672 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449983 4672 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449990 4672 flags.go:64] FLAG: --image-service-endpoint=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.449996 4672 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450002 4672 flags.go:64] FLAG: --kube-api-burst="100"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450008 4672 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450038 4672 flags.go:64] FLAG: --kube-api-qps="50"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450045 4672 flags.go:64] FLAG: --kube-reserved=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450051 4672 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450057 4672 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450063 4672 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450069 4672 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450076 4672 flags.go:64] FLAG: --lock-file=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450081 4672 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450088 4672 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450094 4672 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450103 4672 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450109 4672 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450115 4672 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450121 4672 flags.go:64] FLAG: --logging-format="text"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450128 4672 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450134 4672 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450140 4672 flags.go:64] FLAG: --manifest-url=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450146 4672 flags.go:64] FLAG: --manifest-url-header=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450155 4672 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450161 4672 flags.go:64] FLAG: --max-open-files="1000000"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450169 4672 flags.go:64] FLAG: --max-pods="110"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450175 4672 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450181 4672 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450187 4672 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450193 4672 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450199 4672 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450206 4672 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450212 4672 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450228 4672 flags.go:64] FLAG: --node-status-max-images="50"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450235 4672 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450243 4672 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450251 4672 flags.go:64] FLAG: --pod-cidr=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450258 4672 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450268 4672 flags.go:64] FLAG: --pod-manifest-path=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450274 4672 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450280 4672 flags.go:64] FLAG: --pods-per-core="0"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450287 4672 flags.go:64] FLAG: --port="10250"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450294 4672 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450299 4672 flags.go:64] FLAG: --provider-id=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450305 4672 flags.go:64] FLAG: --qos-reserved=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450311 4672 flags.go:64] FLAG: --read-only-port="10255"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450318 4672 flags.go:64] FLAG: --register-node="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450324 4672 flags.go:64] FLAG: --register-schedulable="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450330 4672 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450340 4672 flags.go:64] FLAG: --registry-burst="10"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450346 4672 flags.go:64] FLAG: --registry-qps="5"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450352 4672 flags.go:64] FLAG: --reserved-cpus=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450358 4672 flags.go:64] FLAG: --reserved-memory=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450366 4672 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450372 4672 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450378 4672 flags.go:64] FLAG: --rotate-certificates="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450384 4672 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450390 4672 flags.go:64] FLAG: --runonce="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450396 4672 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450435 4672 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450442 4672 flags.go:64] FLAG: --seccomp-default="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450448 4672 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450454 4672 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450460 4672 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450467 4672 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450473 4672 flags.go:64] FLAG: --storage-driver-password="root"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450479 4672 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450485 4672 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450492 4672 flags.go:64] FLAG: --storage-driver-user="root"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450498 4672 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450505 4672 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450511 4672 flags.go:64] FLAG: --system-cgroups=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450518 4672 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450528 4672 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450534 4672 flags.go:64] FLAG: --tls-cert-file=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450540 4672 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450548 4672 flags.go:64] FLAG: --tls-min-version=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450554 4672 flags.go:64] FLAG: --tls-private-key-file=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450560 4672 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450566 4672 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450572 4672 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450578 4672 flags.go:64] FLAG: --v="2"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450586 4672 flags.go:64] FLAG: --version="false"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450594 4672 flags.go:64] FLAG: --vmodule=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450601 4672 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.450608 4672 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
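The FLAG: lines above are a one-entry-per-flag dump of the command line's final state after parsing; the standard Go flag machinery supports this directly, since every registered flag can be visited together with its current value. A minimal stdlib sketch producing the same shape of output (the flag names here are a small illustrative subset):

```go
// flagdump.go — sketch of logging every registered flag as `FLAG: --name="value"`,
// the same shape as the flags.go:64 lines above. Uses only the standard library.
package main

import (
	"flag"
	"fmt"
)

func main() {
	addr := flag.String("address", "0.0.0.0", "bind address")
	maxPods := flag.Int("max-pods", 110, "maximum pods per node")
	flag.Parse()

	_ = addr
	_ = maxPods

	// VisitAll walks every registered flag, whether or not it was set on the
	// command line, which is why defaults such as --max-pods="110" also
	// appear in the dump.
	flag.VisitAll(func(f *flag.Flag) {
		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value.String())
	})
}
```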
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.451255 4672 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.461264 4672 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.461302 4672 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.462234 4672 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.466358 4672 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.466454 4672 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.468072 4672 server.go:997] "Starting client certificate rotation"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.468095 4672 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.469252 4672 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-10 00:26:59.161984413 +0000 UTC
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.469369 4672 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2265h38m15.692617046s for next certificate rotation
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.620378 4672 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.641306 4672 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.664279 4672 log.go:25] "Validated CRI v1 runtime API"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.737232 4672 log.go:25] "Validated CRI v1 image API"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.765433 4672 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
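In the certificate lines above, the rotation deadline (2026-01-10) falls well before the expiration (2026-02-24): the kubelet's certificate manager schedules rotation at a randomized point late in the certificate's validity window, so a fleet of kubelets does not renew at the same instant. A sketch of that jittered-deadline computation; the 70-90% band and the one-year NotBefore are assumptions for illustration, not values read from this log:

```go
// rotation.go — sketch of a jittered certificate-rotation deadline: pick a
// random point in the 70-90% band of the certificate's validity window
// (the upstream certificate_manager uses this style of jitter; the exact
// constants here are an assumption for illustration).
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	fraction := 0.7 + 0.2*rand.Float64() // somewhere in [0.7, 0.9)
	return notBefore.Add(time.Duration(fraction * float64(total)))
}

func main() {
	// Validity window matching the expiration in the log above, assuming a
	// one-year client certificate (the issue date is not shown in the log).
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)

	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Println("rotation deadline:", deadline)
	fmt.Println("waiting:", time.Until(deadline).Round(time.Second))
}
```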
/dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.812441 4672 manager.go:217] Machine: {Timestamp:2025-10-07 14:48:43.787775134 +0000 UTC m=+0.762953735 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:e258f24a-a647-4fb0-b924-1d4075da0e45 BootID:aa7996fa-f726-455b-ba8a-95e8a3b764cf Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:50:8b:66 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:50:8b:66 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:04:d6:3c Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:39:08:e8 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:28:c8:c9 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e5:26:56 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:c5:84:c0:66:4b Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8a:8b:ae:19:84:70 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 
Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.812719 4672 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.812911    4672 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.813308    4672 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.813489    4672 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.813524    4672 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.813752    4672 topology_manager.go:138] "Creating topology manager with none policy"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.813765    4672 container_manager_linux.go:303] "Creating device plugin manager"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.814394    4672 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.814422    4672 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.815481    4672 state_mem.go:36] "Initialized new in-memory state store"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.815566    4672 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.824520    4672 kubelet.go:418] "Attempting to sync node with API server"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.824543    4672 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.826039    4672 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.826105    4672 kubelet.go:324] "Adding apiserver pod source"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.826133    4672 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.832212    4672 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.835178    4672 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.836977    4672 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838810    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838847    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838881    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838890    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838902    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838909    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838917    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838928    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838937    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838945    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838957    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.838964    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.839530    4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.839589    4672 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.839596    4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.839942    4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.840007    4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.840120    4672 server.go:1280] "Started kubelet"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.840135    4672 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:43 crc systemd[1]: Started Kubernetes Kubelet.
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.844136    4672 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.844288    4672 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.844777    4672 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845051    4672 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845115    4672 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845432    4672 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 00:02:54.839590698 +0000 UTC
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845484    4672 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2457h14m10.994109062s for next certificate rotation
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845641    4672 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845677    4672 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.845795    4672 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.845627    4672 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.846072    4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.122:6443: connect: connection refused" interval="200ms"
Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.846571    4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.846633    4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.847995    4672 factory.go:153] Registering CRI-O factory
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848051    4672 factory.go:221] Registration of the crio container factory successfully
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848187    4672 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848212    4672 factory.go:55] Registering systemd factory
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848223    4672 factory.go:221] Registration of the systemd container factory successfully
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848247    4672 factory.go:103] Registering Raw factory
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.848263    4672 manager.go:1196] Started watching for new ooms in manager
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.849005    4672 manager.go:319] Starting recovery of all containers
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.849731    4672 server.go:460] "Adding debug handlers to kubelet server"
Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.855104    4672 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.122:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c3ce889fc0255 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 14:48:43.840094805 +0000 UTC m=+0.815273386,LastTimestamp:2025-10-07 14:48:43.840094805 +0000 UTC m=+0.815273386,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861109    4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861193    4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861211    4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861228 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861241 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861254 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861267 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861281 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861297 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861310 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861325 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861341 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861354 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861375 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861389 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861402 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861415 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861428 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861442 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861453 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861468 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861480 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861492 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861531 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861546 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861558 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861571 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861593 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861607 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861620 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861650 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861664 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861678 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861691 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861703 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861714 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861726 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861738 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861749 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861763 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861775 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861788 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861799 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861811 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861825 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861837 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861850 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861862 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861874 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861889 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861902 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861915 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861932 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861945 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861959 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861971 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861984 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.861996 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862008 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862039 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862052 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862065 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862079 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862091 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862105 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862119 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862132 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862143 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862157 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862169 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862182 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862196 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862209 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862221 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862233 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.862246 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864139 4672 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864175 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864194 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864208 4672 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864223 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864243 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864257 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864271 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864284 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864296 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864309 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864323 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864337 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864349 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864365 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864379 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864393 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864407 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864423 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864437 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864456 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864474 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864489 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864504 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864518 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864532 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864547 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864561 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864575 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864595 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864612 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864639 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864658 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864674 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864688 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864702 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864717 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864735 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864751 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864768 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864782 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864795 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864809 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864823 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864839 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864854 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864868 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864882 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864896 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864910 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864924 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864937 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864949 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864966 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864978 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.864991 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865005 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865043 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865057 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865071 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865084 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865099 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865112 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865124 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865137 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865154 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865167 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865182 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865197 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865213 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865228 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865249 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865264 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865279 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865294 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865309 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865322 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865335 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865349 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865363 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865376 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865391 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865428 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865443 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865457 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865483 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865496 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865510 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865524 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865539 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865552 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865566 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865578 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865609 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865622 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865638 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865651 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865665 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865679 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865691 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865704 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865720 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865735 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865748 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865761 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865775 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865787 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865800 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865811 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865824 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865838 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865852 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865866 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865879 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865892 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865905 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865922 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865936 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865949 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865962 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865976 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.865990 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866003 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866035 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866048 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866060 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866074 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866087 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866100 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866113 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866125 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866137 4672 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866149 4672 reconstruct.go:97] "Volume reconstruction finished" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.866158 4672 reconciler.go:26] "Reconciler: start to sync state" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.874798 4672 manager.go:324] Recovery completed Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.888071 4672 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.889721 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.890476 4672 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.890505 4672 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.890530 4672 kubelet.go:2335] "Starting kubelet main sync loop" Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.890787 4672 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.891397 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.891425 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.891438 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.892499 4672 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.892514 4672 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.892530 4672 state_mem.go:36] "Initialized new in-memory state store" Oct 07 14:48:43 crc kubenswrapper[4672]: W1007 14:48:43.892910 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.893025 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.910228 4672 policy_none.go:49] "None policy: Start" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.912118 4672 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.912148 4672 state_mem.go:35] "Initializing new in-memory state store" Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.945968 4672 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.957392 4672 manager.go:334] "Starting Device Plugin manager" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.957452 4672 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.957468 4672 server.go:79] "Starting device plugin registration server" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.957934 4672 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.957953 4672 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.958106 4672 plugin_watcher.go:51] "Plugin Watcher Start" 
path="/var/lib/kubelet/plugins_registry" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.958198 4672 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.958212 4672 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 07 14:48:43 crc kubenswrapper[4672]: E1007 14:48:43.968840 4672 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.991198 4672 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.991396 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994265 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994303 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994312 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994440 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994664 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.994705 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998005 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998123 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998648 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998660 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998808 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.998949 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999009 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999674 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999686 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999782 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:43 crc kubenswrapper[4672]: I1007 14:48:43.999932 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:43.999975 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000522 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000550 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000559 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000597 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000614 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000624 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000706 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000720 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000726 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000737 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000897 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.000926 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001336 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001432 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001447 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001624 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001658 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001656 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001769 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.001784 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.002756 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.002780 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.002788 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.046953 4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.122:6443: connect: connection refused" interval="400ms" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.059000 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.059952 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.059985 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.059994 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.060029 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.060423 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.122:6443: connect: 
connection refused" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067603 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067716 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067776 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067796 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067910 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067942 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067962 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.067987 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.068005 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.068049 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.068069 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.068090 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.068113 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169663 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169720 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169743 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169766 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169794 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169794 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169849 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169892 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169858 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169915 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169959 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169893 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169939 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170008 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.169820 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170078 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170061 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170110 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170122 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170141 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170158 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170174 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170189 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170206 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170212 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170239 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170267 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170287 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170313 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.170338 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.261103 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.262221 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.262258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.262270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.262295 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.262742 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.122:6443: 
connect: connection refused" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.325640 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.333507 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.347764 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.352786 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.356239 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.390913 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-bb36b7b4db78bcc876b1a5a494f8fbe339a58d0c390a27480bb1c6b0249a2a04 WatchSource:0}: Error finding container bb36b7b4db78bcc876b1a5a494f8fbe339a58d0c390a27480bb1c6b0249a2a04: Status 404 returned error can't find the container with id bb36b7b4db78bcc876b1a5a494f8fbe339a58d0c390a27480bb1c6b0249a2a04 Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.393937 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-11e2a88a5556fbf22cf2795f721ce23c4c61c08bb602950feef2f24d3daf3cb6 WatchSource:0}: Error finding container 11e2a88a5556fbf22cf2795f721ce23c4c61c08bb602950feef2f24d3daf3cb6: Status 404 returned error can't find the container with id 11e2a88a5556fbf22cf2795f721ce23c4c61c08bb602950feef2f24d3daf3cb6 Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.395559 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-434b4db3252abccf943f0006f30cfbda16a31dda81b5445b1c524abfdda08092 WatchSource:0}: Error finding container 434b4db3252abccf943f0006f30cfbda16a31dda81b5445b1c524abfdda08092: Status 404 returned error can't find the container with id 434b4db3252abccf943f0006f30cfbda16a31dda81b5445b1c524abfdda08092 Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.398674 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-48d542e31db148a40e91fe8c007ea95c2f162ef9d7ef6796b08b233404244d0e WatchSource:0}: Error finding container 48d542e31db148a40e91fe8c007ea95c2f162ef9d7ef6796b08b233404244d0e: Status 404 returned error can't find the container with id 48d542e31db148a40e91fe8c007ea95c2f162ef9d7ef6796b08b233404244d0e Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.401265 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-4022270901f05dd3946d35eefb19a8c1122c13b61df3fff57c33da7b8de93fbe WatchSource:0}: Error finding container 
4022270901f05dd3946d35eefb19a8c1122c13b61df3fff57c33da7b8de93fbe: Status 404 returned error can't find the container with id 4022270901f05dd3946d35eefb19a8c1122c13b61df3fff57c33da7b8de93fbe Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.447915 4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.122:6443: connect: connection refused" interval="800ms" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.663836 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.664911 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.664938 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.664949 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.664969 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.665426 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.122:6443: connect: connection refused" node="crc" Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.746787 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.746883 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:44 crc kubenswrapper[4672]: W1007 14:48:44.750800 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:44 crc kubenswrapper[4672]: E1007 14:48:44.750902 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.841718 4672 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.893608 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4022270901f05dd3946d35eefb19a8c1122c13b61df3fff57c33da7b8de93fbe"} Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.894502 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"48d542e31db148a40e91fe8c007ea95c2f162ef9d7ef6796b08b233404244d0e"} Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.895297 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"434b4db3252abccf943f0006f30cfbda16a31dda81b5445b1c524abfdda08092"} Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.896239 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"11e2a88a5556fbf22cf2795f721ce23c4c61c08bb602950feef2f24d3daf3cb6"} Oct 07 14:48:44 crc kubenswrapper[4672]: I1007 14:48:44.896943 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"bb36b7b4db78bcc876b1a5a494f8fbe339a58d0c390a27480bb1c6b0249a2a04"} Oct 07 14:48:45 crc kubenswrapper[4672]: W1007 14:48:45.037174 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:45 crc kubenswrapper[4672]: E1007 14:48:45.037324 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:45 crc kubenswrapper[4672]: W1007 14:48:45.154525 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:45 crc kubenswrapper[4672]: E1007 14:48:45.154636 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:45 crc kubenswrapper[4672]: E1007 14:48:45.248715 4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.122:6443: connect: connection refused" interval="1.6s" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.465735 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.467173 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.467220 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.467236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.467269 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:48:45 crc kubenswrapper[4672]: E1007 14:48:45.467710 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.122:6443: connect: connection refused" node="crc" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.841385 4672 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.900652 4672 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4427192faf97eb1a9d0f23f1128c244733f94622d34ee70e4a3d5829cbba7d67" exitCode=0 Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.900692 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4427192faf97eb1a9d0f23f1128c244733f94622d34ee70e4a3d5829cbba7d67"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.900746 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.901632 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.901715 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.901725 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.901957 4672 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7" exitCode=0 Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.902038 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.902042 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.902733 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.902764 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.902776 4672 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.903364 4672 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5d318857eb030a9350bae607fc73199e67b87fa718e57f8b0a6761acde795caf" exitCode=0 Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.903432 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5d318857eb030a9350bae607fc73199e67b87fa718e57f8b0a6761acde795caf"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.903541 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.904127 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.904473 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.904490 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.904498 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906105 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906133 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906145 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906153 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906153 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906363 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906388 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906401 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906752 4672 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.906790 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.907684 4672 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad" exitCode=0 Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.907706 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad"} Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.907768 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.908323 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.908341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:45 crc kubenswrapper[4672]: I1007 14:48:45.908349 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.329901 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.378723 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:46 crc kubenswrapper[4672]: W1007 14:48:46.534530 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:46 crc kubenswrapper[4672]: E1007 14:48:46.534617 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.840778 4672 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused Oct 07 14:48:46 crc kubenswrapper[4672]: E1007 14:48:46.849285 4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.122:6443: connect: connection refused" interval="3.2s" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.912992 4672 generic.go:334] 
"Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a21f85e093a9cedeabb42e14c262d0878d430086101ed8b7ad1a67bd448e1ec3" exitCode=0 Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.913163 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.913295 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a21f85e093a9cedeabb42e14c262d0878d430086101ed8b7ad1a67bd448e1ec3"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.914038 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.914075 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.914088 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.916413 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.916470 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.916486 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.916487 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.917646 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.917694 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.917708 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.918160 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c5cb78016f9669053f8fa1c0bdb04a9b9b75858ca3b2ed0014a7ae7e387b6d27"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.918176 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.919236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.919273 4672 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.919286 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.926803 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.926802 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.926869 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.926872 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927012 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927050 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927069 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927083 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56"} Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927477 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927504 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.927513 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.928049 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.928082 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:46 crc kubenswrapper[4672]: I1007 14:48:46.928093 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:46 crc kubenswrapper[4672]: W1007 14:48:46.964041 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed 
Oct 07 14:48:46 crc kubenswrapper[4672]: E1007 14:48:46.964117 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.068058 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.070086 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.070158 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.070171 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.070221 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 07 14:48:47 crc kubenswrapper[4672]: E1007 14:48:47.070812 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.122:6443: connect: connection refused" node="crc"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.078011 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.162508 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 07 14:48:47 crc kubenswrapper[4672]: W1007 14:48:47.170272 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:47 crc kubenswrapper[4672]: E1007 14:48:47.170366 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.425200 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.425444 4672 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body=
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.425546 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused"
Oct 07 14:48:47 crc kubenswrapper[4672]: W1007 14:48:47.477561 4672 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.122:6443: connect: connection refused
Oct 07 14:48:47 crc kubenswrapper[4672]: E1007 14:48:47.477649 4672 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.122:6443: connect: connection refused" logger="UnhandledError"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930523 4672 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="36f76d6dfb49d31b4e36e7a00a01ab9980f204b75a6316444630f25fe4b64d52" exitCode=0
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930564 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"36f76d6dfb49d31b4e36e7a00a01ab9980f204b75a6316444630f25fe4b64d52"}
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930625 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930657 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930668 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930675 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930693 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.930626 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.931890 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.931909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.931919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932260 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932271 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
event="NodeHasSufficientMemory" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932285 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932290 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932308 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932319 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932330 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932352 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932363 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:47 crc kubenswrapper[4672]: I1007 14:48:47.932293 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938100 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"700f3e778430dadc7b165a5cf866784910dc3253110f49ab11d48cbbe50d83d5"} Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938148 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"44fe061f885ba9bb76325491dc24eaadd6b6600f6a9bcbe5fd783ade73a41ed7"} Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938160 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4b736d4e172c63cf20ee6c93a15dc6f44149f897292dcef209ffbbb85b53616"} Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938169 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d943c7a26dd7c5fb12fe8832422ea2f04d636ee120c8d568df5d08d37abf3c08"} Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938177 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"12e53b6b643a6ca9f39c504fed333204aa9f20383089cfb3341ae83f0bac4d10"} Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938204 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938295 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938719 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.938752 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:48 crc 
kubenswrapper[4672]: I1007 14:48:48.938766 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939078 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939113 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939125 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939461 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939492 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939501 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939726 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939754 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939764 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.939770 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.940079 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:48 crc kubenswrapper[4672]: I1007 14:48:48.940102 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.189773 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.408956 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.941114 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.941140 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942168 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942184 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942201 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942213 4672 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942227 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:49 crc kubenswrapper[4672]: I1007 14:48:49.942215 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.143923 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.270908 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.271999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.272054 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.272064 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.272087 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.783625 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.783860 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.785029 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.785060 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.785070 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.943945 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.945631 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.945668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:50 crc kubenswrapper[4672]: I1007 14:48:50.945747 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.359827 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.360060 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.361004 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.361049 4672 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.361060 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.945764 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.946524 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.946562 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:51 crc kubenswrapper[4672]: I1007 14:48:51.946574 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:52 crc kubenswrapper[4672]: I1007 14:48:52.190634 4672 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 14:48:52 crc kubenswrapper[4672]: I1007 14:48:52.190731 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 14:48:53 crc kubenswrapper[4672]: E1007 14:48:53.968938 4672 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 14:48:54 crc kubenswrapper[4672]: I1007 14:48:54.855685 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:48:54 crc kubenswrapper[4672]: I1007 14:48:54.855814 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:48:54 crc kubenswrapper[4672]: I1007 14:48:54.857220 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:48:54 crc kubenswrapper[4672]: I1007 14:48:54.857253 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:48:54 crc kubenswrapper[4672]: I1007 14:48:54.857263 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:48:57 crc kubenswrapper[4672]: I1007 14:48:57.621803 4672 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 14:48:57 crc kubenswrapper[4672]: I1007 14:48:57.621862 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed 
with statuscode: 403" Oct 07 14:48:57 crc kubenswrapper[4672]: I1007 14:48:57.626648 4672 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 14:48:57 crc kubenswrapper[4672]: I1007 14:48:57.626712 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.172285 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.172435 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.173835 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.173916 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.173933 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.185212 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.968969 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.970458 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.970514 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:00 crc kubenswrapper[4672]: I1007 14:49:00.970525 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.191110 4672 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.191513 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.429707 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 
07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.429893 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.431078 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.431119 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.431131 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.433690 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.617151 4672 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.619970 4672 trace.go:236] Trace[1073867792]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 14:48:51.541) (total time: 11078ms): Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1073867792]: ---"Objects listed" error: 11078ms (14:49:02.619) Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1073867792]: [11.078441579s] [11.078441579s] END Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.619998 4672 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.620145 4672 trace.go:236] Trace[1125507375]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 14:48:52.358) (total time: 10261ms): Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1125507375]: ---"Objects listed" error: 10261ms (14:49:02.620) Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1125507375]: [10.261259995s] [10.261259995s] END Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.620158 4672 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.622345 4672 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.623451 4672 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.623501 4672 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.625930 4672 trace.go:236] Trace[1912825447]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 14:48:51.513) (total time: 11112ms): Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1912825447]: ---"Objects listed" error: 11111ms (14:49:02.625) Oct 07 14:49:02 crc kubenswrapper[4672]: Trace[1912825447]: [11.112095516s] [11.112095516s] END Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.625967 4672 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843115 4672 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843425 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-zgwqx"]
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843735 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843778 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843838 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.843960 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.844492 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.844526 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.844794 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zgwqx"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.844868 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.844868 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.844905 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.846395 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.846714 4672 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.847185 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.847317 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.847631 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.847691 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.847817 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.848319 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.848480 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.848572 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.848724 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.850810 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.850981 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.869979 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.880354 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.892378 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.904955 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.918052 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926117 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926165 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926189 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926215 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926239 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926261 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926284 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926311 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926346 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926369 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926417 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926442 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926465 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926487 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926517 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926522 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926542 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926566 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926590 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926614 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926636 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926660 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926688 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926709 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926759 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.926760 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927033 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927061 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927069 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927083 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927106 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927179 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927212 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927364 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927414 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927421 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927459 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927567 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927625 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927665 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927702 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927738 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927777 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927863 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927912 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927954 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928043 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928093 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928131 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928169 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928202 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928243 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928286 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928323 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928360 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928398 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928445 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928481 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928524 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928561 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928596 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928631 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928664 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928698 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928731 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928834 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928869 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928901 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928936 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928974 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929066 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929105 4672 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929145 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929197 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929235 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929273 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929318 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927432 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929356 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929394 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929429 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929465 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929505 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929538 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929571 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929684 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929723 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929758 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929790 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929822 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929860 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929892 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929928 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929960 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929989 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.930073 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.930105 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.930142 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931715 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931752 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931782 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931848 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931879 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931904 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931928 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931954 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.931979 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932034 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932061 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932088 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932118 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932142 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932198 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932226 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932253 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932277 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932304 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932329 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932360 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932386 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932411 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932442 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932478 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932512 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932547 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932584 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932619 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932654 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932688 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932728 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932766 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932800 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932836 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932870 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932896 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932920 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932948 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932975 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933002 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933050 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933074 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933101 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933134 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933169 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933208 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933242 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933276 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933308 4672 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933343 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933378 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933420 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933454 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933488 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933519 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933551 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933581 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933617 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933653 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933693 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933730 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933768 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933805 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933842 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933877 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933914 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933952 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.933995 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934057 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934094 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934130 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934166 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934199 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934240 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934274 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934324 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934359 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934398 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934437 4672 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934470 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934505 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934540 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934575 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934692 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934823 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934863 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934905 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.934944 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 
14:49:02.935076 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935229 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935271 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935316 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935356 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935393 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935432 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935467 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935495 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935519 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935546 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935574 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935599 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935624 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935655 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935744 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935817 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flv77\" (UniqueName: \"kubernetes.io/projected/17547458-b00b-4f76-8399-374b637285f6-kube-api-access-flv77\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935864 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935908 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" 
Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.935957 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/17547458-b00b-4f76-8399-374b637285f6-hosts-file\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936049 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936095 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936134 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936173 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936216 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936264 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936307 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936344 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936384 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936422 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936464 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936622 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936725 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936752 4672 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936779 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936812 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936831 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936852 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.936882 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927555 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.943168 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944083 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944372 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944453 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944528 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944587 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944817 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927818 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927883 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927894 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928047 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928066 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928088 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928292 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928311 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928334 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928362 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928395 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928434 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928496 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.928675 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929213 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.929317 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.932808 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944850 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945111 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945369 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945453 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945533 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945555 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945823 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.945888 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946001 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946257 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946373 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946408 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.927683 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946608 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946770 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946828 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.946996 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947055 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947227 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947427 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947465 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947479 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947781 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.947729 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948515 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948587 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948729 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948757 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948769 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.944274 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.948966 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.949154 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.949180 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.949426 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.949445 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.949707 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.950198 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.950385 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.951281 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.951379 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.951432 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:03.451416494 +0000 UTC m=+20.426595075 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.951448 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.951759 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.952074 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.952847 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.952962 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:03.452951326 +0000 UTC m=+20.428129897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.953010 4672 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.953091 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.953158 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.953656 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954188 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954225 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954314 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954333 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954734 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954798 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.954860 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.955086 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.955413 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.956290 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.957442 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.957539 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-07 14:49:03.457517212 +0000 UTC m=+20.432695993 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.958828 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.965797 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.965833 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.965846 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.965913 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:03.465893627 +0000 UTC m=+20.441072208 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.966455 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.966487 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.966556 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.966895 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.968000 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.968275 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.968473 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.968700 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.969212 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.969397 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.970146 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.972040 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.972477 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.973163 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.973193 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.973209 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:02 crc kubenswrapper[4672]: E1007 14:49:02.973268 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:03.473248678 +0000 UTC m=+20.448427259 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.973345 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.973210 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.973427 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.974483 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.974511 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.975347 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.979972 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983040 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983444 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983648 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983666 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983680 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983728 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.983900 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.984413 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.985053 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.985736 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.985902 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.986466 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.986657 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.986856 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.987434 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.988282 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.994123 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:02 crc kubenswrapper[4672]: I1007 14:49:02.994284 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.016765 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.017122 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.018301 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.018439 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.018567 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019483 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019630 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019662 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019804 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019833 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.019925 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.021356 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.021542 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.023405 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.023499 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.024899 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.025093 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.025167 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.025164 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.025432 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.027606 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.027662 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.027683 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.027907 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028196 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028215 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028200 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.027629 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028325 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028486 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028518 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028719 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029007 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029137 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029260 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029359 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.028622 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029426 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029441 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029664 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.029876 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.030071 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.030680 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.031212 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.031418 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.031861 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.032382 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.032408 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.032999 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.036075 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.036436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037007 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037358 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 14:49:03 crc kubenswrapper[4672]: W1007 14:49:03.037430 4672 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~projected/kube-api-access-qs4fp Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037439 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037555 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037755 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/17547458-b00b-4f76-8399-374b637285f6-hosts-file\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037763 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/17547458-b00b-4f76-8399-374b637285f6-hosts-file\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037838 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037907 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.037931 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038090 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038235 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flv77\" (UniqueName: \"kubernetes.io/projected/17547458-b00b-4f76-8399-374b637285f6-kube-api-access-flv77\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038336 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038348 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038357 4672 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on 
node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038368 4672 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038377 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038385 4672 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038394 4672 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038402 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038410 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038418 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038427 4672 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038438 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038448 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038457 4672 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038466 4672 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038474 4672 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038482 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038492 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038500 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038508 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038516 4672 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038524 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038534 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038542 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038552 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038560 4672 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038569 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038577 4672 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038585 4672 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038593 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038601 4672 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038610 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038621 4672 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038634 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038645 4672 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038655 4672 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038665 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038675 4672 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038684 4672 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038694 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038704 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038715 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038727 4672 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038737 4672 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038776 4672 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038787 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038796 4672 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038804 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038814 4672 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038822 4672 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038831 4672 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038854 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038863 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038871 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038879 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038888 4672 reconciler_common.go:293] "Volume 
detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038896 4672 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038904 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038912 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038921 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038929 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038937 4672 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038945 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038952 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038960 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038968 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038978 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038986 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.038994 4672 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039002 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039039 4672 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039048 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039059 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039068 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039097 4672 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039105 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039113 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039122 4672 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039129 4672 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039137 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039145 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039153 4672 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039161 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039169 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039179 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039186 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039196 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039205 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039218 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039226 4672 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039235 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039243 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039251 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039259 4672 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039268 4672 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039276 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039284 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039292 4672 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039301 4672 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039310 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039318 4672 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039326 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039335 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039343 4672 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039351 4672 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039359 4672 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039368 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039375 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039383 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039391 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039401 4672 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039410 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039418 4672 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039414 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039428 4672 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039443 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039469 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039486 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039520 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039536 4672 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039546 4672 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039557 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039567 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039577 4672 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039588 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039607 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039597 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039617 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039519 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117
ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" 
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039630 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039642 4672 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039652 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039661 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039672 4672 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039682 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039691 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039700 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039708 4672 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039717 4672 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039725 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039734 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039743 4672 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039751 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039759 4672 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039768 4672 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039776 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039784 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039779 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039793 4672 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039824 4672 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039841 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039856 4672 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039868 4672 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039938 4672 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039953 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039977 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.039990 4672 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040005 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040035 4672 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040048 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040061 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040073 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040086 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040089 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040098 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040143 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040158 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040148 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040169 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040323 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040421 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.040784 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.041117 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.041307 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.041436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.042058 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.042340 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.042880 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.042916 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.043176 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.043225 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.043339 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.048330 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.048555 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.049006 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.049163 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.049858 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.060634 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.062341 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flv77\" (UniqueName: \"kubernetes.io/projected/17547458-b00b-4f76-8399-374b637285f6-kube-api-access-flv77\") pod \"node-resolver-zgwqx\" (UID: \"17547458-b00b-4f76-8399-374b637285f6\") " pod="openshift-dns/node-resolver-zgwqx"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.067441 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.075093 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.085534 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.093879 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.105139 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.114550 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.123104 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.134004 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141356 4672 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141392 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141405 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141419 4672 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141433 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141443 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141453 4672 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141464 4672 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141474 4672 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141486 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141497 4672 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141508 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141520 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141531 4672 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141558 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141569 4672 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141580 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141590 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141603 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141613 4672 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141624 4672 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141634 4672 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141645 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141655 4672 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141666 4672 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.141678 4672 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.159170 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.170007 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.177419 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.181441 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zgwqx"
Oct 07 14:49:03 crc kubenswrapper[4672]: W1007 14:49:03.189647 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-8d8f55d66ad151706400f0d7369a0ede3cb9ebba70c71dbdd835443b3e310cbd WatchSource:0}: Error finding container 8d8f55d66ad151706400f0d7369a0ede3cb9ebba70c71dbdd835443b3e310cbd: Status 404 returned error can't find the container with id 8d8f55d66ad151706400f0d7369a0ede3cb9ebba70c71dbdd835443b3e310cbd
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.545066 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.545159 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.545191 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.545222 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.545246 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545313 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545392 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545397 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545434 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545449 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.545424471 +0000 UTC m=+21.520603052 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545452 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545473 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.545461072 +0000 UTC m=+21.520639653 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545508 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545522 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.545487193 +0000 UTC m=+21.520665834 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545538 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545558 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.545550035 +0000 UTC m=+21.520728706 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545562 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.545625 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.545607397 +0000 UTC m=+21.520785978 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.847725 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-lk2x7"]
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.848211 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-lk2x7"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.850889 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.851103 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.851377 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-mklmj"]
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.851379 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.851686 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mklmj"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.854916 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.854947 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.856461 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.856525 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.859748 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.860362 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.867477 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.875282 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.885639 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274
c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"
cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.895433 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.895967 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.896551 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.897130 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.897734 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.898664 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.899154 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.899710 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.900712 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.901305 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.902215 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.902771 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.904039 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 07 14:49:03 crc 
kubenswrapper[4672]: I1007 14:49:03.904539 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.904793 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.905061 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.905938 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.906493 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 07 14:49:03 crc 
kubenswrapper[4672]: I1007 14:49:03.907489 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.907847 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.908406 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.909359 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.909936 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.910950 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.911435 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.912430 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.912827 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.913397 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.914420 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.914890 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.915668 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.916310 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.916949 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.917973 4672 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.918152 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.919851 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.921083 
4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.921555 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.923003 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.923760 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.924866 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.925544 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.925902 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.926667 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.927249 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.928235 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.928930 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.929936 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.930397 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.931402 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.931902 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.933014 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.933497 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.934381 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.934874 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.935859 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.936551 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.937046 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.938712 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.944738 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-l8k8z"] Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.945117 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-l8k8z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.946623 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-7n8j7"] Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.947483 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948075 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/42ada921-288b-41fa-a167-6c9b5b8be19c-serviceca\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948106 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948133 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtk9k\" (UniqueName: \"kubernetes.io/projected/42ada921-288b-41fa-a167-6c9b5b8be19c-kube-api-access-gtk9k\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948157 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d7244-71b2-4f06-bb99-2f4069a8198c-proxy-tls\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948179 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d7244-71b2-4f06-bb99-2f4069a8198c-mcd-auth-proxy-config\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948202 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/492d7244-71b2-4f06-bb99-2f4069a8198c-rootfs\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948231 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/42ada921-288b-41fa-a167-6c9b5b8be19c-host\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948261 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk58h\" (UniqueName: \"kubernetes.io/projected/492d7244-71b2-4f06-bb99-2f4069a8198c-kube-api-access-lk58h\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948305 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948496 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.948765 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.949814 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.950009 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.955466 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.955635 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-mfxdl"] Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.956186 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:03 crc kubenswrapper[4672]: E1007 14:49:03.956250 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.957238 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.977237 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8d8f55d66ad151706400f0d7369a0ede3cb9ebba70c71dbdd835443b3e310cbd"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.978730 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.978758 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.978768 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a168715648e2604e2c6c56bc83eee67d8db190cca54d4a8bb99bcc8ceb968848"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.980278 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zgwqx" event={"ID":"17547458-b00b-4f76-8399-374b637285f6","Type":"ContainerStarted","Data":"396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.980312 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zgwqx" event={"ID":"17547458-b00b-4f76-8399-374b637285f6","Type":"ContainerStarted","Data":"0a7795236955c2585f4c8b29e305f1825e5e4374982f4b0e7915401c0ca691c5"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.980770 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.986365 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.986399 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0157de7e7b6f5f92d99ecfaffe3aca61d4b8fbff40244205fdc76440237e1031"} Oct 07 14:49:03 crc kubenswrapper[4672]: I1007 14:49:03.994900 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:03Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.010591 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.036818 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.045947 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bqr7"] Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.046815 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049305 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-multus\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049345 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049371 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049395 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cni-binary-copy\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049418 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-hostroot\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049439 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgg9h\" (UniqueName: \"kubernetes.io/projected/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-kube-api-access-zgg9h\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049469 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk58h\" (UniqueName: \"kubernetes.io/projected/492d7244-71b2-4f06-bb99-2f4069a8198c-kube-api-access-lk58h\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049492 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049509 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-binary-copy\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049542 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/42ada921-288b-41fa-a167-6c9b5b8be19c-serviceca\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049561 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-system-cni-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049584 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtk9k\" (UniqueName: \"kubernetes.io/projected/42ada921-288b-41fa-a167-6c9b5b8be19c-kube-api-access-gtk9k\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049601 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d7244-71b2-4f06-bb99-2f4069a8198c-proxy-tls\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049619 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d7244-71b2-4f06-bb99-2f4069a8198c-mcd-auth-proxy-config\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-netns\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049649 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-os-release\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049664 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-cnibin\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049681 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rootfs\" (UniqueName: \"kubernetes.io/host-path/492d7244-71b2-4f06-bb99-2f4069a8198c-rootfs\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049698 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8j4z\" (UniqueName: \"kubernetes.io/projected/3bab8ff7-6484-479d-9423-0ce0c8f7beff-kube-api-access-g8j4z\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049724 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049761 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/492d7244-71b2-4f06-bb99-2f4069a8198c-rootfs\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049728 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-k8s-cni-cncf-io\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049835 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.049838 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-bin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050055 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/42ada921-288b-41fa-a167-6c9b5b8be19c-host\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050090 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050090 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050124 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-system-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " 
pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050149 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-kubelet\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050163 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/42ada921-288b-41fa-a167-6c9b5b8be19c-host\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050172 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-multus-certs\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050229 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-socket-dir-parent\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050254 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-os-release\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050278 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld2v5\" (UniqueName: \"kubernetes.io/projected/35368809-f456-4e20-8b5f-25442aca1cac-kube-api-access-ld2v5\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050307 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cnibin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050328 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-daemon-config\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050352 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-conf-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " 
pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050382 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-etc-kubernetes\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050501 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d7244-71b2-4f06-bb99-2f4069a8198c-mcd-auth-proxy-config\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.050630 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.051825 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.052179 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.052234 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.052302 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.052179 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/42ada921-288b-41fa-a167-6c9b5b8be19c-serviceca\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.059528 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d7244-71b2-4f06-bb99-2f4069a8198c-proxy-tls\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.064628 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk58h\" (UniqueName: \"kubernetes.io/projected/492d7244-71b2-4f06-bb99-2f4069a8198c-kube-api-access-lk58h\") pod \"machine-config-daemon-mklmj\" (UID: \"492d7244-71b2-4f06-bb99-2f4069a8198c\") " pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: 
I1007 14:49:04.066733 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtk9k\" (UniqueName: \"kubernetes.io/projected/42ada921-288b-41fa-a167-6c9b5b8be19c-kube-api-access-gtk9k\") pod \"node-ca-lk2x7\" (UID: \"42ada921-288b-41fa-a167-6c9b5b8be19c\") " pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.067674 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.084426 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.100845 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.112894 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.123330 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.135418 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.146636 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151613 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-netns\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151651 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-os-release\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151668 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-cnibin\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151687 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151706 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 
14:49:04.151729 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151751 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8j4z\" (UniqueName: \"kubernetes.io/projected/3bab8ff7-6484-479d-9423-0ce0c8f7beff-kube-api-access-g8j4z\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151793 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-netns\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151809 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-k8s-cni-cncf-io\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151843 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-k8s-cni-cncf-io\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151843 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-cnibin\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151883 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-bin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151927 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151967 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-bin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.151974 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-os-release\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152038 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152122 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-system-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152161 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-kubelet\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-multus-certs\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152203 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-run-multus-certs\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152222 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-kubelet\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152231 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152284 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-system-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152365 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-socket-dir-parent\") pod \"multus-l8k8z\" (UID: 
\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152441 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-os-release\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152462 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-socket-dir-parent\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152526 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld2v5\" (UniqueName: \"kubernetes.io/projected/35368809-f456-4e20-8b5f-25442aca1cac-kube-api-access-ld2v5\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152567 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-os-release\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152617 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152663 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152783 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cnibin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152812 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-daemon-config\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config\") pod \"ovnkube-node-2bqr7\" (UID: 
\"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152854 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152872 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152892 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152893 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152928 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-conf-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152938 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cnibin\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152956 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-etc-kubernetes\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152950 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-conf-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.152990 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-multus\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc 
kubenswrapper[4672]: I1007 14:49:04.153000 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-etc-kubernetes\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153033 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153098 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cni-binary-copy\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153068 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-host-var-lib-cni-multus\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153115 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-hostroot\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153197 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgg9h\" (UniqueName: \"kubernetes.io/projected/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-kube-api-access-zgg9h\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153225 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153249 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153268 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-cni-dir\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153139 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-hostroot\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153272 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153332 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153364 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153390 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153425 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153444 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-binary-copy\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153461 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153492 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-system-cni-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.153555 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: 
object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153570 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153597 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-system-cni-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153603 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153611 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-multus-daemon-config\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.153624 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:49:04.653605687 +0000 UTC m=+21.628784358 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153642 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sps29\" (UniqueName: \"kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.153754 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-cni-binary-copy\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.154034 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/35368809-f456-4e20-8b5f-25442aca1cac-cni-binary-copy\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.154128 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/35368809-f456-4e20-8b5f-25442aca1cac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.157883 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.160215 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-lk2x7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.168336 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.168744 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgg9h\" (UniqueName: \"kubernetes.io/projected/a0a0e29e-f4b1-4573-b5a7-3dc297f92a62-kube-api-access-zgg9h\") pod \"multus-l8k8z\" (UID: \"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\") " pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.174956 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld2v5\" (UniqueName: \"kubernetes.io/projected/35368809-f456-4e20-8b5f-25442aca1cac-kube-api-access-ld2v5\") pod \"multus-additional-cni-plugins-7n8j7\" (UID: \"35368809-f456-4e20-8b5f-25442aca1cac\") " pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.175193 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8j4z\" (UniqueName: \"kubernetes.io/projected/3bab8ff7-6484-479d-9423-0ce0c8f7beff-kube-api-access-g8j4z\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.183928 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.196729 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.209494 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.220839 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.234366 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.246195 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254601 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254642 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254667 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254687 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254704 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254704 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254725 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254719 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254779 4672 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254790 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254757 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254809 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254817 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254834 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254855 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254856 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254872 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254889 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254905 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254929 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254940 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254946 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.254987 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255005 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255058 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255087 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255124 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert\") pod \"ovnkube-node-2bqr7\" 
(UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255166 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255210 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sps29\" (UniqueName: \"kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255558 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255597 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255599 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255621 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255636 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255650 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255665 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.255691 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.256570 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.259269 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.262254 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-l8k8z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.263748 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.272267 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sps29\" (UniqueName: \"kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29\") pod \"ovnkube-node-2bqr7\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.272732 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.286445 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.289035 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-pl
ugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a7
14c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: W1007 14:49:04.302354 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35368809_f456_4e20_8b5f_25442aca1cac.slice/crio-003bd46a18fd2d2fb6483b7504ea2291a38d6df08b47495e0553003edebce94d WatchSource:0}: Error finding container 003bd46a18fd2d2fb6483b7504ea2291a38d6df08b47495e0553003edebce94d: Status 404 returned error can't find the container with id 003bd46a18fd2d2fb6483b7504ea2291a38d6df08b47495e0553003edebce94d Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.302609 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.316034 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.325511 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.337900 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.347750 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.361143 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.363351 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.375082 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.385210 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.396467 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.417737 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.432755 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.445046 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.459452 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.474273 4672 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.485603 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.500176 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.512957 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.527247 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.538625 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.557759 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.557877 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.557902 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.557933 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.557953 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558096 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558098 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558112 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558124 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558160 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:06.55814705 +0000 UTC m=+23.533325631 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558173 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:06.558167191 +0000 UTC m=+23.533345772 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558176 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558182 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558306 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558318 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558276 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:06.558254803 +0000 UTC m=+23.533433454 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558382 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:06.558368697 +0000 UTC m=+23.533547368 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.558447 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:06.558437989 +0000 UTC m=+23.533616670 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.658592 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.658745 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.658789 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:49:05.658776055 +0000 UTC m=+22.633954636 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.890907 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.890944 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.890921 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.891070 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.891180 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:04 crc kubenswrapper[4672]: E1007 14:49:04.891271 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.987297 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" exitCode=0 Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.987378 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.987451 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"1a5bc11e36d8181a783611345a15c4004812c1758a0a8453ad3d1ae6c6ab0ed6"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.989462 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.989516 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.989532 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"d4125982f4a2107d16ff2633804cfe85f5dd75c21e9f76d91304f35410f921cf"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.991372 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-lk2x7" event={"ID":"42ada921-288b-41fa-a167-6c9b5b8be19c","Type":"ContainerStarted","Data":"6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.991427 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-lk2x7" event={"ID":"42ada921-288b-41fa-a167-6c9b5b8be19c","Type":"ContainerStarted","Data":"e57a2610291956739a4033cf9ecdfb6b54ca99f15931df760c5d046677ff2a77"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.993273 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerStarted","Data":"04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.993341 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerStarted","Data":"e4ad0d8af6d30bc8f149ca7252af8dd07d75b55c7e6ed0d5f7d4d63c0c2ec7ab"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.995085 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5" exitCode=0 Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.995165 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5"} Oct 07 14:49:04 crc kubenswrapper[4672]: I1007 14:49:04.995210 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerStarted","Data":"003bd46a18fd2d2fb6483b7504ea2291a38d6df08b47495e0553003edebce94d"} Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.001408 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.014445 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.035863 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.058964 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.081177 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.102403 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.122728 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.146931 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with 
unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.194412 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.234660 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.260197 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.290206 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.308194 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.319117 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.340526 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.359004 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.377001 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.390752 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.405722 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.422873 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 
2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.438838 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.453005 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.469098 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.495504 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8
e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe5
6e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.515707 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: 
I1007 14:49:05.529528 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.548640 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.589595 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:05Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.668250 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:05 crc kubenswrapper[4672]: E1007 14:49:05.668386 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:05 crc kubenswrapper[4672]: E1007 14:49:05.668442 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:49:07.668425429 +0000 UTC m=+24.643604010 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:05 crc kubenswrapper[4672]: I1007 14:49:05.891100 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:05 crc kubenswrapper[4672]: E1007 14:49:05.891748 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.002496 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce" exitCode=0 Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.002557 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.009179 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.009223 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.009235 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.009244 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.009257 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.010670 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897"} Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.015549 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.034001 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.048857 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.084283 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.123529 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.140635 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.152926 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.166759 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.181081 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.196338 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.209167 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.222181 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.245621 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.266729 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.281175 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.293939 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.303864 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.314926 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.351680 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.387732 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.428929 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.472936 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.510517 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.549168 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.581724 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.581841 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.581866 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.581897 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.581916 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582040 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582055 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582053 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582136 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:10.582114925 +0000 UTC m=+27.557293556 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582141 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582179 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582158 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:10.582150036 +0000 UTC m=+27.557328617 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582194 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582066 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582053 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582250 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:10.582232188 +0000 UTC m=+27.557410839 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582272 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:10.582263769 +0000 UTC m=+27.557442450 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.582310 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:10.58230143 +0000 UTC m=+27.557480091 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.590365 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.632498 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.670547 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d746
2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.707781 4672 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:06Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.890933 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.890928 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.891062 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:06 crc kubenswrapper[4672]: I1007 14:49:06.890936 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.891141 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:06 crc kubenswrapper[4672]: E1007 14:49:06.891218 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.015010 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889" exitCode=0 Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.015049 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889"} Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.019436 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.027784 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.036549 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.049000 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\
\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.069359 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.087135 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.098161 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.116959 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.132219 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.146322 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.162129 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.174469 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.189401 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.234287 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.267619 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:07Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.690680 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:07 crc kubenswrapper[4672]: E1007 14:49:07.690885 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:07 crc kubenswrapper[4672]: E1007 14:49:07.690971 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:49:11.690932218 +0000 UTC m=+28.666110799 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:07 crc kubenswrapper[4672]: I1007 14:49:07.890801 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:07 crc kubenswrapper[4672]: E1007 14:49:07.890934 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.024646 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e" exitCode=0 Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.024686 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e"} Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.037853 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.048915 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.064169 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\
\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.083649 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.100466 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.111799 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.124998 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d
44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.139657 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.152882 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.167072 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.177441 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.189369 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.200936 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.212355 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:08Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.891363 4672 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.891416 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:08 crc kubenswrapper[4672]: I1007 14:49:08.891462 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:08 crc kubenswrapper[4672]: E1007 14:49:08.891499 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:08 crc kubenswrapper[4672]: E1007 14:49:08.891626 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:08 crc kubenswrapper[4672]: E1007 14:49:08.891766 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.023107 4672 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.024887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.024919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.024930 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.025073 4672 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.032242 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.035132 4672 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.035390 4672 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.037223 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.037279 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.037297 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.037322 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.037343 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.038704 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0" exitCode=0 Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.038750 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.050244 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.051478 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.054683 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.054711 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.054722 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.054739 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.054751 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.063958 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.071067 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.074255 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.074294 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.074303 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.074339 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.074351 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.076961 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.086629 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.091084 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.091232 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.091294 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.091384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.091491 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.095864 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd
613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.104093 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.107624 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.108272 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.108296 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.108304 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.108318 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.108328 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.119739 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.119869 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.121591 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.121627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.121645 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.121665 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.121676 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.123135 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.133470 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.149202 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.165935 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.181585 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.193457 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.193604 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.197072 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.202877 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.207943 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.219834 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.223205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.223243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.223254 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.223270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.223283 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.230986 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.242690 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.255942 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.269375 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.282001 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f
5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.299215 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.308183 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.321711 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.325769 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.325805 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.325813 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.325827 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.325838 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.334594 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.345548 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.355205 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.366550 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.376711 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.385251 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.396759 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.416243 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:09Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.427875 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.427913 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.427924 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.427939 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.427954 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.529983 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.530031 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.530044 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.530060 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.530072 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.633467 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.633509 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.633518 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.633534 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.633545 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.735586 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.735625 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.735634 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.735647 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.735659 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.838212 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.838260 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.838270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.838284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.838294 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.890818 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:09 crc kubenswrapper[4672]: E1007 14:49:09.890963 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.940251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.940301 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.940314 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.940331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:09 crc kubenswrapper[4672]: I1007 14:49:09.940340 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:09Z","lastTransitionTime":"2025-10-07T14:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.041873 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.041922 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.041934 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.041949 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.041960 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.045141 4672 generic.go:334] "Generic (PLEG): container finished" podID="35368809-f456-4e20-8b5f-25442aca1cac" containerID="7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381" exitCode=0 Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.045214 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerDied","Data":"7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.061302 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk
58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.085167 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.102144 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.114960 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.129248 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.140921 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.149592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.149633 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.149643 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.149661 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.149673 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.154318 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.167056 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.179261 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.192541 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.207058 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.217787 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.231842 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.252692 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.253053 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.253131 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.253231 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.253318 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.253411 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd
613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.270751 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:10Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.356051 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.356090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.356099 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.356113 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.356128 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.458654 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.458694 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.458704 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.458718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.458730 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.560970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.561000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.561009 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.561038 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.561048 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.618897 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.619046 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619097 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:18.619071011 +0000 UTC m=+35.594249592 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.619162 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.619228 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619160 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.619267 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619261 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619349 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:18.619328279 +0000 UTC m=+35.594506860 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619357 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619372 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619386 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619403 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:18.619393471 +0000 UTC m=+35.594572052 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619435 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:18.619421551 +0000 UTC m=+35.594600132 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619366 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619720 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619731 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.619773 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:18.619764712 +0000 UTC m=+35.594943293 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.663511 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.663547 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.663557 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.663573 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.663584 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.766315 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.766360 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.766371 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.766391 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.766406 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.870593 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.870966 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.870979 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.871000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.871043 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.890850 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.890927 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.890993 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.890963 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.891210 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:10 crc kubenswrapper[4672]: E1007 14:49:10.891464 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.974046 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.974086 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.974096 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.974112 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:10 crc kubenswrapper[4672]: I1007 14:49:10.974124 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:10Z","lastTransitionTime":"2025-10-07T14:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.053153 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" event={"ID":"35368809-f456-4e20-8b5f-25442aca1cac","Type":"ContainerStarted","Data":"b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.060533 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.060915 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.069256 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.076593 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.076767 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.076853 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.076921 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.076977 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.083001 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.093348 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.096953 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.112133 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.127948 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.164619 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.179769 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.179817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.179830 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.179847 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.179859 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.188939 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.207982 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z 
is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.221337 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.233602 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.248337 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.262369 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.276783 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.281894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.281951 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.281970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.281986 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.281998 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.291345 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.301721 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.314627 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.327277 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.343553 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.357736 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.371214 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.381503 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.384570 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.384610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.384622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.384648 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.384661 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.393202 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.408691 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.424384 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.438267 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.459825 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.474734 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.488032 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.488097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.488111 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.488136 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.488161 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.489055 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.500958 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.514797 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.592218 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.592295 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.592315 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.592348 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.592372 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.695646 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.695700 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.695708 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.695722 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.695730 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.731184 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:11 crc kubenswrapper[4672]: E1007 14:49:11.731330 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:11 crc kubenswrapper[4672]: E1007 14:49:11.731403 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. 
No retries permitted until 2025-10-07 14:49:19.731385292 +0000 UTC m=+36.706563873 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.797982 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.798035 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.798045 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.798061 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.798072 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.891621 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:11 crc kubenswrapper[4672]: E1007 14:49:11.891834 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.900351 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.900411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.900426 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.900448 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:11 crc kubenswrapper[4672]: I1007 14:49:11.900462 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:11Z","lastTransitionTime":"2025-10-07T14:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.002949 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.002997 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.003006 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.003042 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.003051 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.064140 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.064550 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.083780 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.096112 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.105500 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.105532 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.105541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.105558 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.105570 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.109780 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.124190 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.133604 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.145577 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.156364 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.165767 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.177262 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.196997 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd
2bd0e74f23807354a9b248f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.207437 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.207486 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.207497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.207512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.207524 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.208904 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.219264 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.234074 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.246236 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.255525 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.268378 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:12Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.309079 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.309123 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.309146 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.309163 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.309175 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.411826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.411879 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.411889 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.411905 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.411918 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.514344 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.514386 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.514395 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.514409 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.514418 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.616473 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.616506 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.616515 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.616528 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.616537 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.718878 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.718948 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.718960 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.718978 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.719001 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.822217 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.822308 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.822321 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.822343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.822355 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.891360 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.891406 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.891504 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:12 crc kubenswrapper[4672]: E1007 14:49:12.891609 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:12 crc kubenswrapper[4672]: E1007 14:49:12.891704 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:12 crc kubenswrapper[4672]: E1007 14:49:12.891811 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.925103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.925164 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.925178 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.925208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:12 crc kubenswrapper[4672]: I1007 14:49:12.925227 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:12Z","lastTransitionTime":"2025-10-07T14:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.027662 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.027748 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.027761 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.027777 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.027790 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.066999 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.133497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.133550 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.133578 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.133604 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.133617 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.235868 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.235920 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.235931 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.235948 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.235960 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.338136 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.338186 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.338197 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.338212 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.338222 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.440571 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.440607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.440615 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.440631 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.440640 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.542970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.543000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.543007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.543034 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.543044 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.645484 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.645538 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.645549 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.645561 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.645571 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.747457 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.747523 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.747545 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.747573 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.747593 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.850407 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.850446 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.850456 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.850474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.850486 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.891407 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:13 crc kubenswrapper[4672]: E1007 14:49:13.891592 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.904911 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.915778 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.927776 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.942342 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.953445 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.953506 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.953520 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.953587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.953602 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:13Z","lastTransitionTime":"2025-10-07T14:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.955617 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.967207 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.980157 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:13 crc kubenswrapper[4672]: I1007 14:49:13.992443 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.002795 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.015827 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.035211 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd
2bd0e74f23807354a9b248f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.048248 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.055533 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.055573 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.055581 4672 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.055597 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.055607 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.061841 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.072286 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/0.log" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.072948 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0
a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.075201 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3" exitCode=1 Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.075256 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3"} Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.076262 4672 scope.go:117] "RemoveContainer" containerID="b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.089987 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.102671 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.116463 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.129504 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.139462 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.152222 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.157208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.157265 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.157277 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.157294 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.157305 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.165645 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.175734 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.190734 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.213171 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:13Z\\\",\\\"message\\\":\\\"topping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 14:49:13.580256 6013 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580433 6013 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580486 6013 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580562 6013 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580687 6013 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580921 6013 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.581143 6013 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:13.581175 6013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 14:49:13.581187 6013 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:13.581200 6013 factory.go:656] Stopping watch factory\\\\nI1007 14:49:13.581214 6013 ovnkube.go:599] Stopped ovnkube\\\\nI1007 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.226791 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.238723 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.252776 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.263190 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.263235 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.263249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.263267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.263279 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.265260 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.274895 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.288994 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.365701 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.365748 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.365760 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.365777 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.365790 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.468052 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.468267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.468341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.468421 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.468539 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.571251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.571290 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.571301 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.571316 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.571329 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.673282 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.673312 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.673323 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.673339 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.673350 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.775458 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.775488 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.775497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.775510 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.775519 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.810798 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.877704 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.877734 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.877744 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.877760 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.877771 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.891484 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.891508 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.891562 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:14 crc kubenswrapper[4672]: E1007 14:49:14.891592 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:14 crc kubenswrapper[4672]: E1007 14:49:14.891689 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:14 crc kubenswrapper[4672]: E1007 14:49:14.891766 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.979596 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.979645 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.979659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.979672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:14 crc kubenswrapper[4672]: I1007 14:49:14.979681 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:14Z","lastTransitionTime":"2025-10-07T14:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.080096 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/1.log"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.080698 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/0.log"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.081007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.081086 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.081104 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.081130 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.081154 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.082858 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e" exitCode=1
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.082887 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.082926 4672 scope.go:117] "RemoveContainer" containerID="b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.083393 4672 scope.go:117] "RemoveContainer" containerID="2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e"
Oct 07 14:49:15 crc kubenswrapper[4672]: E1007 14:49:15.083523 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.097576 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.106953 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.118827 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.135346 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cb
a52dbcdac108105aaf14d22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1eb880a670e5550eda9d8e3b49690deb0edf6bd2bd0e74f23807354a9b248f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:13Z\\\",\\\"message\\\":\\\"topping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 14:49:13.580256 6013 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580433 6013 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580486 6013 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580562 6013 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580687 6013 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.580921 6013 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 14:49:13.581143 6013 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:13.581175 6013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 14:49:13.581187 6013 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:13.581200 6013 factory.go:656] Stopping watch factory\\\\nI1007 14:49:13.581214 6013 ovnkube.go:599] Stopped ovnkube\\\\nI1007 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.148588 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.161320 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.174642 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.183616 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.183800 4672 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.183893 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.183980 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.184075 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.187997 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/
ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.199799 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.212736 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.223493 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.235927 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.246206 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.254729 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.266255 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:15Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.286622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.286654 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.286662 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.286675 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.286684 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.389330 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.389380 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.389393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.389410 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.389422 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.491147 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.491186 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.491205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.491222 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.491234 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.593495 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.593533 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.593544 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.593559 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.593626 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.695789 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.695826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.695836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.695850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.695860 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.798407 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.798465 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.798481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.798505 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.798523 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.890940 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:15 crc kubenswrapper[4672]: E1007 14:49:15.891102 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.900071 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.900097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.900105 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.900116 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:15 crc kubenswrapper[4672]: I1007 14:49:15.900126 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:15Z","lastTransitionTime":"2025-10-07T14:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.001853 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.001896 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.001905 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.001919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.001927 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.087493 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/1.log"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.090748 4672 scope.go:117] "RemoveContainer" containerID="2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e"
Oct 07 14:49:16 crc kubenswrapper[4672]: E1007 14:49:16.090891 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.102740 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.104093 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.104139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.104150 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.104165 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.104177 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.113918 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.125791 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 
2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.137335 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.144991 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.156830 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.167586 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.179479 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.189690 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.198510 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.206448 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.206483 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.206495 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.206513 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.206525 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.213693 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.224296 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.233830 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.244476 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.261810 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:16Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.309002 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.309052 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.309063 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.309079 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.309090 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.411564 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.411612 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.411622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.411639 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.411651 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.513828 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.513864 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.513902 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.513930 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.513943 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.616478 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.616510 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.616519 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.616532 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.616544 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.718714 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.718767 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.718783 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.718803 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.718813 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.820757 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.820812 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.820822 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.820838 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.820849 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.891775 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.891812 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.891832 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:16 crc kubenswrapper[4672]: E1007 14:49:16.891897 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:16 crc kubenswrapper[4672]: E1007 14:49:16.892099 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:16 crc kubenswrapper[4672]: E1007 14:49:16.892203 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.923474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.923526 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.923539 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.923557 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:16 crc kubenswrapper[4672]: I1007 14:49:16.923571 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:16Z","lastTransitionTime":"2025-10-07T14:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.025466 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.025531 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.025541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.025555 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.025563 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.127171 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.127215 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.127223 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.127238 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.127248 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.208678 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p"] Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.209186 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.211714 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.213110 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.230088 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.230122 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.230131 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.230144 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.230153 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.239637 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.257689 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.273492 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.283943 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.284025 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.284047 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b415edcf-67a9-406e-9158-263831ea1b98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.284076 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqb5d\" (UniqueName: \"kubernetes.io/projected/b415edcf-67a9-406e-9158-263831ea1b98-kube-api-access-rqb5d\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.284495 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.299702 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.309942 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.319614 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.331959 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.331994 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.332007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.332039 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.332052 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.333968 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.344274 4672 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.354330 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.364325 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.376487 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.384992 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.385092 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-ovnkube-config\") pod 
\"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.385116 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b415edcf-67a9-406e-9158-263831ea1b98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.385153 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqb5d\" (UniqueName: \"kubernetes.io/projected/b415edcf-67a9-406e-9158-263831ea1b98-kube-api-access-rqb5d\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.385632 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.385789 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b415edcf-67a9-406e-9158-263831ea1b98-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.387980 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.390729 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b415edcf-67a9-406e-9158-263831ea1b98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.399205 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.400505 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqb5d\" (UniqueName: \"kubernetes.io/projected/b415edcf-67a9-406e-9158-263831ea1b98-kube-api-access-rqb5d\") pod \"ovnkube-control-plane-749d76644c-bng2p\" (UID: \"b415edcf-67a9-406e-9158-263831ea1b98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.410988 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.422078 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:17Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.434523 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.434559 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.434570 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.434586 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.434597 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.521243 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" Oct 07 14:49:17 crc kubenswrapper[4672]: W1007 14:49:17.531946 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb415edcf_67a9_406e_9158_263831ea1b98.slice/crio-a59a1df8da29158c185d338145e2509730dc9509975495b3ef571ddd4f105ebe WatchSource:0}: Error finding container a59a1df8da29158c185d338145e2509730dc9509975495b3ef571ddd4f105ebe: Status 404 returned error can't find the container with id a59a1df8da29158c185d338145e2509730dc9509975495b3ef571ddd4f105ebe Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.535908 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.535938 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.535947 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.535961 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.535970 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.637907 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.637943 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.637951 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.637964 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.637973 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.740118 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.740153 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.740162 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.740177 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.740186 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.842581 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.842617 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.842627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.842641 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.842653 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.891734 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:17 crc kubenswrapper[4672]: E1007 14:49:17.891882 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.945778 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.946077 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.946088 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.946105 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:17 crc kubenswrapper[4672]: I1007 14:49:17.946117 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:17Z","lastTransitionTime":"2025-10-07T14:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.048821 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.048861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.048873 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.048888 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.048899 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.098372 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" event={"ID":"b415edcf-67a9-406e-9158-263831ea1b98","Type":"ContainerStarted","Data":"6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.098430 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" event={"ID":"b415edcf-67a9-406e-9158-263831ea1b98","Type":"ContainerStarted","Data":"496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.098441 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" event={"ID":"b415edcf-67a9-406e-9158-263831ea1b98","Type":"ContainerStarted","Data":"a59a1df8da29158c185d338145e2509730dc9509975495b3ef571ddd4f105ebe"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.117105 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.130901 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.148650 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.151673 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.151720 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.151734 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.151754 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.151766 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.163826 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.172813 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.185102 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.203206 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.222918 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.238932 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.252258 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.253753 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.253773 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.253783 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.253795 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.253805 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.264238 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.281297 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.295862 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.305750 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.318929 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.335739 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cb
a52dbcdac108105aaf14d22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:18Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.355527 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.355567 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.355575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.355590 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.355600 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.457786 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.457821 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.457829 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.457843 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.457852 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.559989 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.560044 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.560058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.560075 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.560086 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.662779 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.662816 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.662825 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.662839 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.662851 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.698570 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.698717 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:49:34.698691677 +0000 UTC m=+51.673870258 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.698798 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.698842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.698887 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.698910 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699045 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699059 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 
14:49:18.699069 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699106 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:34.69909931 +0000 UTC m=+51.674277891 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699122 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699137 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699222 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:34.699202343 +0000 UTC m=+51.674380924 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699139 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699263 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:34.699257815 +0000 UTC m=+51.674436396 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699153 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699287 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.699313 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:49:34.699307426 +0000 UTC m=+51.674486007 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.764957 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.764998 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.765007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.765038 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.765048 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.866749 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.866791 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.866802 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.866816 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.866825 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.891231 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.891279 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.891363 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.891397 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.891465 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:18 crc kubenswrapper[4672]: E1007 14:49:18.891643 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.968540 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.968583 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.968594 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.968612 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:18 crc kubenswrapper[4672]: I1007 14:49:18.968623 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:18Z","lastTransitionTime":"2025-10-07T14:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.071258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.071293 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.071301 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.071313 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.071323 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.173854 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.173918 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.173928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.173943 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.173954 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.276588 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.276636 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.276646 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.276663 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.276675 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.281962 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.282003 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.282032 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.282050 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.282110 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.294803 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:19Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.298627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.298681 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.298694 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.298712 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.298723 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.313960 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.314011 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.314037 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.314054 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.314064 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.329981 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.330050 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.330061 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.330074 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.330082 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:19Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.346907 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.346955 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.346966 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.346982 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.346993 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.358873 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:19Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.359007 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.379271 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.379308 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.379318 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.379333 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.379343 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.482188 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.482258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.482279 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.482376 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.482404 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.585406 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.585441 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.585449 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.585518 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.585530 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.688750 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.688838 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.688857 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.688887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.688909 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.791361 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.791413 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.791426 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.791443 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.791455 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.809236 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.809392 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.809458 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:49:35.809440621 +0000 UTC m=+52.784619202 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered
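The mount failure above is retried on a growing delay: the operation fails at 14:49:19, the next attempt is not permitted until 14:49:35, and the log names the gap explicitly ("durationBeforeRetry 16s"). That 16 s gap is consistent with a doubling backoff of the 1 s, 2 s, 4 s, 8 s, 16 s shape. A minimal Go sketch of such a doubling-with-cap retry delay follows; the initial delay and cap are illustrative assumptions, not values read out of the kubelet:

    // nextDelay doubles the previous retry delay up to a limit.
    // Illustrative sketch only; not the kubelet's actual backoff code.
    package main

    import (
        "fmt"
        "time"
    )

    func nextDelay(prev, limit time.Duration) time.Duration {
        if prev <= 0 {
            return time.Second // assumed initial delay
        }
        d := 2 * prev
        if d > limit {
            return limit
        }
        return d
    }

    func main() {
        var d time.Duration
        for i := 1; i <= 6; i++ {
            d = nextDelay(d, 2*time.Minute)
            fmt.Printf("retry %d after %v\n", i, d) // 1s 2s 4s 8s 16s 32s
        }
    }

The "not registered" error itself typically means the kubelet's secret manager has not yet registered a watch on openshift-multus/metrics-daemon-secret for this pod, so a later retry should succeed once pod registration catches up.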
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.891133 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:19 crc kubenswrapper[4672]: E1007 14:49:19.891290 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.893149 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.893189 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.893200 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.893216 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.893227 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.996149 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.996182 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.996191 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.996205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:19 crc kubenswrapper[4672]: I1007 14:49:19.996215 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:19Z","lastTransitionTime":"2025-10-07T14:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.099057 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.099094 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.099107 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.099123 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.099137 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.201417 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.201601 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.201609 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.201624 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.201632 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.307077 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.307137 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.307151 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.307176 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.307191 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.409799 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.409833 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.409840 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.409853 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.409861 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.512875 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.513000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.513050 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.513076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.513091 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.615970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.616048 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.616062 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.616081 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.616196 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.720178 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.720252 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.720278 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.720313 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.720349 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.823452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.823525 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.823541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.823567 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.823585 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.891353 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.891354 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:20 crc kubenswrapper[4672]: E1007 14:49:20.891486 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
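Every pod sync failure in this stretch traces back to one condition: the container runtime reports NetworkReady=false because it finds no network configuration in /etc/kubernetes/cni/net.d/, so the kubelet cannot create a sandbox for any pod that needs cluster networking. The readiness test amounts to "does the CNI conf directory contain at least one config file". A rough Go sketch of that check follows; the extension list follows the common libcni convention and is an assumption here, not CRI-O's exact logic:

    // hasCNIConfig reports whether dir contains at least one CNI network
    // config. Sketch of the readiness check implied by the log message,
    // not the runtime's actual implementation.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func hasCNIConfig(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // libcni-style extensions (assumed)
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        fmt.Println("network config present:", ok)
    }

Once the network operator writes a config into that directory, NetworkReady should flip to true and these sandboxes can be created.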
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:20 crc kubenswrapper[4672]: E1007 14:49:20.891557 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.891374 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:20 crc kubenswrapper[4672]: E1007 14:49:20.891657 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.926881 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.926932 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.926950 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.926965 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:20 crc kubenswrapper[4672]: I1007 14:49:20.926975 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:20Z","lastTransitionTime":"2025-10-07T14:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.030053 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.030114 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.030143 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.030174 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.030195 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.133126 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.133176 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.133185 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.133201 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.133214 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.235158 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.235197 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.235205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.235218 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.235228 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.336866 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.336896 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.336906 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.336919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.336929 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.438695 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.438723 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.438732 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.438745 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.438754 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.541724 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.541766 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.541774 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.541786 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.541795 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.645143 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.645214 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.645228 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.645249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.645264 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.748774 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.748823 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.748838 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.748859 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.748871 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.852758 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.852864 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.852880 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.852899 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.852914 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.890966 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:21 crc kubenswrapper[4672]: E1007 14:49:21.891321 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.956615 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.956699 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.956718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.956749 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:21 crc kubenswrapper[4672]: I1007 14:49:21.956772 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:21Z","lastTransitionTime":"2025-10-07T14:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.059753 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.060299 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.060387 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.060505 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.060797 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
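The node-status patches earlier in this log are being rejected for an unrelated reason: the serving certificate behind the node.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-10-07T14:49:19Z, so every Post to https://127.0.0.1:9743/node fails TLS verification before the status update can land. The same verdict can be reproduced offline by parsing the certificate and comparing its validity window against the clock. A minimal Go sketch follows; the tls.crt path is a placeholder, not a path taken from the log:

    // certcheck: parse a PEM certificate and report whether the current
    // time falls inside its NotBefore/NotAfter validity window.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        data, err := os.ReadFile("tls.crt") // placeholder path
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now().UTC()
        fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n",
            cert.NotBefore.Format(time.RFC3339),
            cert.NotAfter.Format(time.RFC3339),
            now.Format(time.RFC3339))
        switch {
        case now.After(cert.NotAfter):
            fmt.Println("certificate has expired") // the case hit at 14:49 above
        case now.Before(cert.NotBefore):
            fmt.Println("certificate is not yet valid")
        default:
            fmt.Println("certificate is within its validity window")
        }
    }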
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.163993 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.164100 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.164121 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.164147 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.164166 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.266855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.266916 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.266939 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.266964 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.266981 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.369639 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.369700 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.369714 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.369733 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.369746 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.472340 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.472388 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.472403 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.472425 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.472441 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.574501 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.574542 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.574557 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.574573 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.574588 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.677061 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.677094 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.677103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.677120 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.677129 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.779482 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.779811 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.779842 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.779872 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.779886 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.882344 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.882375 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.882384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.882395 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.882405 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.891028 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:22 crc kubenswrapper[4672]: E1007 14:49:22.891298 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.891177 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:22 crc kubenswrapper[4672]: E1007 14:49:22.891509 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.891062 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:22 crc kubenswrapper[4672]: E1007 14:49:22.891725 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.985078 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.985113 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.985122 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.985135 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:22 crc kubenswrapper[4672]: I1007 14:49:22.985144 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:22Z","lastTransitionTime":"2025-10-07T14:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.087458 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.087492 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.087500 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.087512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.087526 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.191168 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.191225 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.191243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.191262 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.191272 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.293797 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.293842 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.293854 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.293870 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.293880 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.396243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.396303 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.396313 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.396325 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.396335 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.498474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.498520 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.498535 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.498553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.498565 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.601218 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.601267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.601278 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.601295 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.601305 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.703503 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.703560 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.703572 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.703585 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.703595 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.824906 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.824936 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.824945 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.824958 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.824968 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.891690 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:23 crc kubenswrapper[4672]: E1007 14:49:23.891838 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.904259 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.919103 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.927778 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.927825 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.927835 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.927850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.927861 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:23Z","lastTransitionTime":"2025-10-07T14:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.932344 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.943151 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.953225 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.964968 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.980691 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:23 crc kubenswrapper[4672]: I1007 14:49:23.991264 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.001503 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:23Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.010879 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.021968 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.029703 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.029728 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.029736 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.029751 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.029763 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.032974 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.042623 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.052904 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.069977 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.080918 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:24Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.131595 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.131627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.131636 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.131650 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.131660 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.233776 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.233807 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.233815 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.233826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.233834 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.335996 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.336064 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.336074 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.336090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.336102 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.438270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.438309 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.438317 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.438330 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.438339 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.540144 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.540179 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.540188 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.540202 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.540210 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.641946 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.641992 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.642004 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.642036 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.642053 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.744869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.745148 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.745218 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.745287 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.745346 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.847110 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.847375 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.847466 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.847564 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.847643 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.891680 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:24 crc kubenswrapper[4672]: E1007 14:49:24.891990 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.891746 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:24 crc kubenswrapper[4672]: E1007 14:49:24.892302 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.891696 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:24 crc kubenswrapper[4672]: E1007 14:49:24.892538 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.949665 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.949708 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.949719 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.949738 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:24 crc kubenswrapper[4672]: I1007 14:49:24.949749 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:24Z","lastTransitionTime":"2025-10-07T14:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.052038 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.052097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.052109 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.052127 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.052142 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.154807 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.154847 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.154855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.154871 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.154882 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.257359 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.257442 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.257476 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.257509 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.257531 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.360707 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.360765 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.360775 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.360798 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.360809 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.463999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.464056 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.464071 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.464087 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.464096 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.566502 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.566545 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.566557 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.566580 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.566593 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.671137 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.671187 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.671196 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.671242 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.671253 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.774493 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.774539 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.774549 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.774565 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.774575 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.876284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.876316 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.876324 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.876339 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.876347 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.890853 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:25 crc kubenswrapper[4672]: E1007 14:49:25.891044 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.978333 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.978372 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.978380 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.978395 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:25 crc kubenswrapper[4672]: I1007 14:49:25.978404 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:25Z","lastTransitionTime":"2025-10-07T14:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.080758 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.080792 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.080803 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.080818 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.080829 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.184259 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.184303 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.184312 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.184327 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.184338 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.288412 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.288500 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.288512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.288530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.288540 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.391597 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.391656 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.391668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.391691 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.391708 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.495315 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.495373 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.495385 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.495406 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.495418 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.598552 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.598592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.598602 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.598618 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.598628 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.701992 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.702058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.702068 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.702082 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.702094 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.805926 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.806010 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.806099 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.806139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.806165 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.890770 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:26 crc kubenswrapper[4672]: E1007 14:49:26.890880 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.890784 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:26 crc kubenswrapper[4672]: E1007 14:49:26.891373 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.891431 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:26 crc kubenswrapper[4672]: E1007 14:49:26.891494 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.891703 4672 scope.go:117] "RemoveContainer" containerID="2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.910194 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.910771 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.910796 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.910830 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:26 crc kubenswrapper[4672]: I1007 14:49:26.910855 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:26Z","lastTransitionTime":"2025-10-07T14:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.015227 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.015652 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.015811 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.015928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.016084 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.119593 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.119636 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.119646 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.119668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.119682 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.129099 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/1.log"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.132386 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c"}
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.133060 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7"
Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.149850 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.161994 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.176062 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.192639 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.209311 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.222400 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.222452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.222469 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.222490 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.222507 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.227480 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.241547 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.256991 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.274503 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.289790 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.310009 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.325298 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.325342 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.325355 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.325381 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.325399 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.334920 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.356030 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.372626 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.384156 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.399204 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:27Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.428478 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.428769 4672 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.428928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.429494 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.429621 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.532833 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.533115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.533208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.533283 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.533348 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.640212 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.640284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.640298 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.640321 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.640334 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.743158 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.743221 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.743398 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.743415 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.743424 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.846080 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.846119 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.846128 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.846143 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.846153 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.890848 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:27 crc kubenswrapper[4672]: E1007 14:49:27.891067 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.948372 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.948438 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.948453 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.948481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:27 crc kubenswrapper[4672]: I1007 14:49:27.948497 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:27Z","lastTransitionTime":"2025-10-07T14:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.050801 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.050841 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.050852 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.050869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.050881 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.137532 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/2.log" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.138493 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/1.log" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.142082 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" exitCode=1 Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.142141 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.142182 4672 scope.go:117] "RemoveContainer" containerID="2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.142871 4672 scope.go:117] "RemoveContainer" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" Oct 07 14:49:28 crc kubenswrapper[4672]: E1007 14:49:28.143075 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.153645 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.153844 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.153933 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.154088 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.154247 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.155698 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.171880 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 
2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.185599 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.200823 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.212367 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.227148 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.238436 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.250562 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.255904 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.255947 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.255961 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.255979 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.255992 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.262533 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.272406 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.285377 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.298127 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.308670 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.323081 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.342286 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088
d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf5bc64c4f57efc5edfdf0a981c9f187aa0a4cba52dbcdac108105aaf14d22e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:14Z\\\",\\\"message\\\":\\\"4.773972 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-mklmj after 0 failed attempt(s)\\\\nI1007 14:49:14.773977 6138 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-mklmj\\\\nI1007 14:49:14.773975 6138 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:14.773982 6138 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zgwqx in node crc\\\\nI1007 14:49:14.773970 6138 factory.go:656] Stopping watch factory\\\\nI1007 14:49:14.773993 6138 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-zgwqx after 0 failed attempt(s)\\\\nI1007 14:49:14.774001 6138 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-zgwqx\\\\nI1007 14:49:14.773979 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 14:49:14.774004 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:14.774058 6138 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 103.433µs)\\\\nI1007 14:49:14.774077 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:14.774141 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed 
*v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.358163 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.358199 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.358208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.358222 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.358231 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.359688 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:28Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.461010 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.461084 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.461097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.461115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.461127 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.563624 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.563667 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.563676 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.563694 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.563704 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.665985 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.666042 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.666052 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.666071 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.666087 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.768774 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.768849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.768860 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.768880 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.768898 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.871531 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.871566 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.871573 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.871587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.871597 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.891679 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.891761 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.891811 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:28 crc kubenswrapper[4672]: E1007 14:49:28.891828 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:28 crc kubenswrapper[4672]: E1007 14:49:28.891925 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:28 crc kubenswrapper[4672]: E1007 14:49:28.892033 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.973318 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.973367 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.973382 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.973398 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:28 crc kubenswrapper[4672]: I1007 14:49:28.973408 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:28Z","lastTransitionTime":"2025-10-07T14:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.077189 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.077237 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.077257 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.077275 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.077287 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.146597 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/2.log" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.149802 4672 scope.go:117] "RemoveContainer" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.149941 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.163685 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.176874 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.179823 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.179850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.179858 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.179870 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.179880 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.186733 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.199749 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.218647 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.229844 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.240863 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.251598 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.264560 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.275572 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.281856 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.281893 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.281904 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.281919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.281930 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.284608 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.294285 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.307047 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.336817 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.351790 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.362132 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.384286 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.384321 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.384329 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.384341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.384350 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.486514 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.486549 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.486557 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.486569 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.486578 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.588887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.588923 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.588932 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.588947 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.588955 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.691383 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.691433 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.691441 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.691456 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.691469 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.694953 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.695076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.695115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.695148 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.695168 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.709671 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.713044 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.713092 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.713102 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.713115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.713125 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.725853 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.728527 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.728553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.728561 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.728575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.728587 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.740876 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.744412 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.744438 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.744446 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.744474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.744496 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.755620 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.758916 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.758963 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.758975 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.758991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.759002 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.769413 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:29Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.769572 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.794077 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.794113 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.794124 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.794139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.794150 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.891444 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:29 crc kubenswrapper[4672]: E1007 14:49:29.891565 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.896692 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.896734 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.896746 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.896761 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.896772 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.998847 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.998876 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.998886 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.998899 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:29 crc kubenswrapper[4672]: I1007 14:49:29.998909 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:29Z","lastTransitionTime":"2025-10-07T14:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.100727 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.100781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.100795 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.100812 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.100824 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.202973 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.203051 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.203063 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.203076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.203085 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.305174 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.305206 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.305215 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.305228 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.305236 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.407410 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.407456 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.407468 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.407484 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.407497 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.509536 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.509584 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.509592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.509608 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.509620 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.611484 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.611547 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.611562 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.611584 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.611600 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.714157 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.714191 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.714199 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.714210 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.714221 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.816795 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.816826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.816836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.816849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.816858 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.891000 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.891050 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:30 crc kubenswrapper[4672]: E1007 14:49:30.891173 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.891234 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:30 crc kubenswrapper[4672]: E1007 14:49:30.891335 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:30 crc kubenswrapper[4672]: E1007 14:49:30.891421 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.919307 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.919341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.919349 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.919362 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:30 crc kubenswrapper[4672]: I1007 14:49:30.919372 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:30Z","lastTransitionTime":"2025-10-07T14:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.021583 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.021623 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.021640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.021654 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.021664 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.123388 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.123427 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.123436 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.123450 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.123460 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.225514 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.225547 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.225555 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.225567 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.225575 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.327399 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.327447 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.327459 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.327473 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.327482 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.429770 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.429827 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.429839 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.429856 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.429876 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.532378 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.532419 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.532427 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.532442 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.532461 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.634564 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.634608 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.634619 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.634638 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.634654 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.736652 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.736704 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.736711 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.736731 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.736747 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.839543 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.839589 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.839599 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.839615 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.839624 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.891374 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:31 crc kubenswrapper[4672]: E1007 14:49:31.891532 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.941645 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.941699 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.941711 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.941728 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:31 crc kubenswrapper[4672]: I1007 14:49:31.941740 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:31Z","lastTransitionTime":"2025-10-07T14:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.043576 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.043609 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.043617 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.043630 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.043639 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.146008 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.146080 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.146090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.146118 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.146127 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.248622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.248672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.248681 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.248695 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.248737 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.350997 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.351059 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.351071 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.351087 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.351098 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.453957 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.454006 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.454048 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.454067 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.454079 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.556117 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.556149 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.556157 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.556175 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.556201 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.658444 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.658497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.658510 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.658525 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.658536 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.760320 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.760357 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.760369 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.760413 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.760423 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.862681 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.862723 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.862734 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.862750 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.862761 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.891163 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.891247 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:32 crc kubenswrapper[4672]: E1007 14:49:32.891297 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:32 crc kubenswrapper[4672]: E1007 14:49:32.891317 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.891245 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:32 crc kubenswrapper[4672]: E1007 14:49:32.891375 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.964534 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.964578 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.964590 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.964606 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:32 crc kubenswrapper[4672]: I1007 14:49:32.964616 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:32Z","lastTransitionTime":"2025-10-07T14:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.066829 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.066864 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.066873 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.066886 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.066899 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.169415 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.169449 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.169466 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.169484 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.169495 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.271449 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.271486 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.271494 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.271508 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.271517 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.374413 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.374445 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.374455 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.374470 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.374480 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.476221 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.476311 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.476349 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.476366 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.476379 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.578957 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.578996 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.579004 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.579037 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.579046 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.681481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.681518 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.681529 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.681546 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.681558 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.783105 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.783150 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.783159 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.783175 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.783186 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.885211 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.885236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.885245 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.885259 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.885268 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.891831 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:33 crc kubenswrapper[4672]: E1007 14:49:33.891959 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.906344 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.919785 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.930820 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.944260 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z"
Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.962187 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.976199 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.987486 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.987530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.987540 4672 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.987553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.987562 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:33Z","lastTransitionTime":"2025-10-07T14:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.988935 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:33 crc kubenswrapper[4672]: I1007 14:49:33.998844 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:33Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.012938 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.028405 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:
44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.038698 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.052071 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.064038 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.074783 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.086133 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.089680 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.089712 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.089721 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.089733 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.089742 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.097272 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:34Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.192002 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.192063 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.192075 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.192090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.192099 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.294356 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.294408 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.294420 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.294444 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.294457 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.396871 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.396902 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.396912 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.396925 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.396934 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.499009 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.499069 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.499085 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.499103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.499114 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.601943 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.602227 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.602239 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.602254 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.602266 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.704296 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.704347 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.704356 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.704368 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.704377 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.763913 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.764004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.764049 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.764076 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.764097 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764123 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:50:06.764101791 +0000 UTC m=+83.739280382 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764196 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764209 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764207 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764242 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764280 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764293 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764302 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:50:06.764282697 +0000 UTC m=+83.739461278 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764327 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:50:06.764318348 +0000 UTC m=+83.739496929 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764213 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764220 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764419 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:50:06.764402481 +0000 UTC m=+83.739581162 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.764466 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:50:06.764457652 +0000 UTC m=+83.739636233 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.806222 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.806259 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.806267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.806281 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.806291 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.891416 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.891450 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.891472 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.891536 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.891583 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:34 crc kubenswrapper[4672]: E1007 14:49:34.891638 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.908180 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.908217 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.908225 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.908239 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:34 crc kubenswrapper[4672]: I1007 14:49:34.908249 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:34Z","lastTransitionTime":"2025-10-07T14:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.009660 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.009696 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.009705 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.009719 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.009729 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.111596 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.111652 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.111668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.111687 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.111698 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.214023 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.214080 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.214090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.214105 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.214116 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.316335 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.316380 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.316392 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.316407 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.316421 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.419278 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.419352 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.419370 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.419396 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.419411 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.521709 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.521759 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.521775 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.521796 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.521814 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.625133 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.625185 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.625196 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.625211 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.625221 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.727781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.727828 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.727840 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.727855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.727867 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.829933 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.829994 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.830067 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.830098 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.830118 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.876339 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:35 crc kubenswrapper[4672]: E1007 14:49:35.876536 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:35 crc kubenswrapper[4672]: E1007 14:49:35.876725 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:50:07.876697592 +0000 UTC m=+84.851876203 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.891036 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:35 crc kubenswrapper[4672]: E1007 14:49:35.891201 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.932713 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.932755 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.932766 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.932781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:35 crc kubenswrapper[4672]: I1007 14:49:35.932790 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:35Z","lastTransitionTime":"2025-10-07T14:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.035701 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.035744 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.035752 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.035768 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.035779 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.138587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.138621 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.138630 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.138643 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.138652 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.240505 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.240545 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.240558 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.240575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.240588 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.342883 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.342948 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.342958 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.342974 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.342986 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.445517 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.445566 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.445575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.445592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.445602 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.548174 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.548226 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.548240 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.548257 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.548268 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.651048 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.651090 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.651100 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.651116 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.651128 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.753924 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.753972 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.753982 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.753997 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.754006 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.856718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.856771 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.856783 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.856801 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.856814 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.890845 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:36 crc kubenswrapper[4672]: E1007 14:49:36.890997 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.890872 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.890864 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:36 crc kubenswrapper[4672]: E1007 14:49:36.891126 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:36 crc kubenswrapper[4672]: E1007 14:49:36.891270 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.959322 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.959358 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.959366 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.959380 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:36 crc kubenswrapper[4672]: I1007 14:49:36.959389 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:36Z","lastTransitionTime":"2025-10-07T14:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.061240 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.061275 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.061285 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.061304 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.061316 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.082411 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.091191 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.094891 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.103758 4672 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.114675 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.125598 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.136609 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.147426 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.157447 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.163250 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.163848 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.163861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.163875 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.163884 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.171415 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.182696 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.192447 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.204576 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.220978 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.233651 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.248549 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.258727 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.266556 4672 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.266588 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.266598 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.266611 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.266622 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.274248 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:37Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.368730 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.368978 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.369076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.369357 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.369370 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.471511 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.471545 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.471553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.471567 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.471576 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.574628 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.574680 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.574692 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.574711 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.574723 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.676731 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.676781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.676794 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.676817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.676834 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.779086 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.779128 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.779139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.779154 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.779164 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.881438 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.881490 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.881507 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.881531 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.881546 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.891085 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:37 crc kubenswrapper[4672]: E1007 14:49:37.891236 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.984274 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.984328 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.984343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.984367 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:37 crc kubenswrapper[4672]: I1007 14:49:37.984391 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:37Z","lastTransitionTime":"2025-10-07T14:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.085999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.086083 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.086096 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.086115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.086126 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.188314 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.188353 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.188363 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.188376 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.188386 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.291213 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.291264 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.291279 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.291298 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.291313 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.394080 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.394121 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.394132 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.394149 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.394161 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.496849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.496937 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.496958 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.496987 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.497005 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.599668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.599706 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.599715 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.599728 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.599739 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.701855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.701901 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.701944 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.701979 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.701991 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.804428 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.804468 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.804483 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.804498 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.804508 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.890863 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.890929 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:38 crc kubenswrapper[4672]: E1007 14:49:38.890978 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.890929 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:38 crc kubenswrapper[4672]: E1007 14:49:38.891059 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:38 crc kubenswrapper[4672]: E1007 14:49:38.891105 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.906305 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.906341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.906350 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.906362 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:38 crc kubenswrapper[4672]: I1007 14:49:38.906371 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:38Z","lastTransitionTime":"2025-10-07T14:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.009273 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.009316 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.009329 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.009347 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.009360 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.111349 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.111378 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.111386 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.111397 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.111407 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.213797 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.214084 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.214220 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.214319 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.214398 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.316642 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.316909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.316970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.317058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.317116 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.418992 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.419302 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.419381 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.419466 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.419545 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.521297 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.521334 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.521345 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.521360 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.521372 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.624196 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.624249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.624261 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.624276 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.624288 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.726861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.726909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.726920 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.726934 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.726944 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.829945 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.829991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.830001 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.830037 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.830066 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.891115 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:39 crc kubenswrapper[4672]: E1007 14:49:39.891360 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.892143 4672 scope.go:117] "RemoveContainer" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" Oct 07 14:49:39 crc kubenswrapper[4672]: E1007 14:49:39.892450 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.932489 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.932522 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.932530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.932541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.932551 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.962452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.962492 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.962502 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.962513 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.962523 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: E1007 14:49:39.974492 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:39Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.977845 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.977878 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.977887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.977900 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.977908 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:39 crc kubenswrapper[4672]: E1007 14:49:39.991153 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:39Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.994411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.994451 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
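Every retry above fails the same way: the node-status PATCH is rejected because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z. Below is a minimal Go diagnostic sketch for confirming this from the node itself; it assumes the endpoint is reachable locally, and it sets InsecureSkipVerify only so the handshake completes and the served certificate can be read, since verification is exactly what is failing.

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the webhook error recorded in the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("TLS handshake failed: %v", err)
	}
	defer conn.Close()
	now := time.Now().UTC()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject.String(),
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

For the failure above, notAfter would print as 2025-08-24T17:21:41Z with expired=true, matching the x509 message the kubelet keeps logging.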
event="NodeHasNoDiskPressure" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.994459 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.994472 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:39 crc kubenswrapper[4672]: I1007 14:49:39.994481 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:39Z","lastTransitionTime":"2025-10-07T14:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.005511 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:40Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.009237 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.009437 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
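Independently of the webhook failure, every Ready condition above is False because the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/. Here is a small sketch of the check an operator might run on the node; the path comes straight from the log message, and a missing or empty directory is exactly what the NetworkReady=false condition describes while the network operator has not yet started.

package main

import (
	"fmt"
	"os"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d/" // path quoted in the kubelet message above
	entries, err := os.ReadDir(dir)
	if err != nil {
		// A missing directory matches the "no CNI configuration file" condition.
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	if len(entries) == 0 {
		fmt.Printf("%s exists but holds no CNI config files\n", dir)
		return
	}
	for _, e := range entries {
		fmt.Println(dir + e.Name())
	}
}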
event="NodeHasNoDiskPressure" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.009527 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.009609 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.009682 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:40Z","lastTransitionTime":"2025-10-07T14:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.021006 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:40Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.024506 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.024545 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
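The back-to-back "will retry" records are the kubelet's bounded node-status retry loop: it attempts the PATCH a fixed number of times and then gives up, which is why the final attempt below is immediately followed by "Unable to update node status" with "update node status exceeds retry count". A minimal sketch of that pattern follows, assuming the upstream kubelet constant nodeStatusUpdateRetry = 5 (pkg/kubelet/kubelet.go); the stub error merely mirrors the x509 failure above and is not the kubelet's real PATCH path.

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the retry bound in the upstream kubelet
// (an assumption based on pkg/kubelet/kubelet.go, not taken from this log).
const nodeStatusUpdateRetry = 5

// tryPatchNodeStatus is a stand-in for the real node-status PATCH; here it
// always fails the way the expired webhook certificate makes it fail above.
func tryPatchNodeStatus() error {
	return errors.New("x509: certificate has expired or is not yet valid")
}

func main() {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		err := tryPatchNodeStatus()
		if err == nil {
			return
		}
		fmt.Printf("Error updating node status, will retry: %v\n", err)
	}
	fmt.Println("Unable to update node status: update node status exceeds retry count")
}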
event="NodeHasNoDiskPressure" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.024556 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.024569 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.024578 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:40Z","lastTransitionTime":"2025-10-07T14:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.035067 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:40Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.035199 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.036816 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.036864 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.036876 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.036893 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.036904 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:40Z","lastTransitionTime":"2025-10-07T14:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.138684 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.138723 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.138731 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.138745 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.138754 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:40Z","lastTransitionTime":"2025-10-07T14:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.240750 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.240783 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.240817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.240834 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.240845 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:40Z","lastTransitionTime":"2025-10-07T14:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.891516 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.891516 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:40 crc kubenswrapper[4672]: I1007 14:49:40.891539 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.891990 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.892095 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:40 crc kubenswrapper[4672]: E1007 14:49:40.892048 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
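[Editor's note] Every failure above bottoms out in "no CNI configuration file in /etc/kubernetes/cni/net.d/": the CNI config-directory scan found nothing to load. Below is a rough sketch of such a scan, loosely modeled on libcni's ConfFiles helper; the directory path comes from the log, while the extension list is the conventional CNI set and is an assumption here.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// confFiles lists candidate CNI config files in dir, loosely mirroring
// libcni's ConfFiles helper: plain files carrying one of the extensions.
func confFiles(dir string, exts []string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var files []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		ext := filepath.Ext(e.Name())
		for _, want := range exts {
			if ext == want {
				files = append(files, filepath.Join(dir, e.Name()))
			}
		}
	}
	return files, nil
}

func main() {
	dir := "/etc/kubernetes/cni/net.d" // conf dir named in the log above
	files, err := confFiles(dir, []string{".conf", ".conflist", ".json"})
	if err != nil || len(files) == 0 {
		// The situation the kubelet keeps reporting above.
		fmt.Printf("no CNI configuration file in %s\n", dir)
		return
	}
	fmt.Println("CNI configs:", files)
}
```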
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.060065 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.060107 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.060115 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.060129 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.060138 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:41Z","lastTransitionTime":"2025-10-07T14:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:41 crc kubenswrapper[4672]: I1007 14:49:41.891865 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:41 crc kubenswrapper[4672]: E1007 14:49:41.892167 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.084065 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.084101 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.084117 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.084132 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.084142 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:42Z","lastTransitionTime":"2025-10-07T14:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
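[Editor's note] The pod_workers.go:1301 "Error syncing pod, skipping" entries show pods on the cluster network being refused setup until the runtime reports NetworkReady=true, while host-network pods (multus itself, for example) keep running. Below is a toy model of that gate; the types and control flow are invented for illustration and are not the kubelet's actual code.

```go
package main

import (
	"errors"
	"fmt"
)

// A reduced model of the gate that produces the "network is not ready"
// sync errors above: pods on the cluster network cannot be set up until
// the runtime reports NetworkReady=true; host-network pods are exempt.
type pod struct {
	name        string
	hostNetwork bool
}

func syncPod(p pod, networkReady bool) error {
	if !p.hostNetwork && !networkReady {
		return errors.New("network is not ready: container runtime network not ready: NetworkReady=false")
	}
	// ... sandbox and container creation would proceed here ...
	return nil
}

func main() {
	pods := []pod{
		{name: "openshift-multus/network-metrics-daemon-mfxdl", hostNetwork: false},
		{name: "openshift-multus/multus-l8k8z", hostNetwork: true},
	}
	for _, p := range pods {
		if err := syncPod(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping err=%q pod=%q\n", err, p.name)
		}
	}
}
```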
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.891035 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.891076 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:42 crc kubenswrapper[4672]: E1007 14:49:42.891169 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:42 crc kubenswrapper[4672]: I1007 14:49:42.891041 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:42 crc kubenswrapper[4672]: E1007 14:49:42.891266 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:42 crc kubenswrapper[4672]: E1007 14:49:42.891341 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.008130 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.008172 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.008189 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.008204 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.008215 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:43Z","lastTransitionTime":"2025-10-07T14:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
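[Editor's note] The util.go:30 message "No sandbox for pod can be found. Need to start a new one" fires when the runtime reports no existing sandbox for the pod, forcing the next sync to create a fresh one. A toy sketch of that check follows; the types are invented for illustration, while the real decision lives in the kubelet's kuberuntime pod-sync path.

```go
package main

import "fmt"

// A toy version of the check behind "No sandbox for pod can be found.
// Need to start a new one": if the runtime returned no sandbox statuses
// for the pod, the kubelet must create a fresh sandbox.
type podStatus struct {
	name            string
	sandboxStatuses []string // IDs of known sandboxes, newest first
}

func needsNewSandbox(ps podStatus) bool {
	return len(ps.sandboxStatuses) == 0
}

func main() {
	ps := podStatus{name: "openshift-network-diagnostics/network-check-target-xd92c"}
	if needsNewSandbox(ps) {
		fmt.Printf("No sandbox for pod can be found. Need to start a new one pod=%q\n", ps.name)
	}
}
```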
Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.891192 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:43 crc kubenswrapper[4672]: E1007 14:49:43.891311 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.904425 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.914975 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.925111 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.929457 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.929493 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.929501 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.929515 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.929525 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:43Z","lastTransitionTime":"2025-10-07T14:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.938230 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.960394 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping 
metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/
serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.976303 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:43 crc kubenswrapper[4672]: I1007 14:49:43.991564 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:43Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.002268 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.015416 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.029145 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.035669 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.035710 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.035722 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.035740 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.035754 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.038934 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.048875 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.065996 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.077074 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.090179 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.101861 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.114329 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:44Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.138209 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.138241 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.138249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.138261 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.138269 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.240309 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.240669 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.240737 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.240801 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.240857 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.343219 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.343260 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.343271 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.343289 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.343300 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.445795 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.445836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.445847 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.445863 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.445873 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.548625 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.548668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.548680 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.548695 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.548706 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.652092 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.652137 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.652149 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.652172 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.652187 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.754983 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.755096 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.755108 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.755130 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.755146 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.857180 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.857268 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.857287 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.857312 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.857329 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.890789 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.890988 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:44 crc kubenswrapper[4672]: E1007 14:49:44.891219 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.891288 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:44 crc kubenswrapper[4672]: E1007 14:49:44.891352 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:44 crc kubenswrapper[4672]: E1007 14:49:44.891441 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.960157 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.960194 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.960204 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.960217 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:44 crc kubenswrapper[4672]: I1007 14:49:44.960226 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:44Z","lastTransitionTime":"2025-10-07T14:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.062403 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.062443 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.062452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.062467 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.062476 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.165073 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.165110 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.165126 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.165143 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.165154 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.267108 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.267318 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.267384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.267479 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.267544 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.370101 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.370144 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.370152 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.370166 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.370176 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.472544 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.472609 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.472621 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.472638 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.472649 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.575350 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.575427 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.575437 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.575450 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.575459 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.678236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.678275 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.678284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.678298 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.678307 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.780718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.780756 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.780765 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.780780 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.780789 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.883209 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.883244 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.883255 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.883270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.883283 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.891521 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:45 crc kubenswrapper[4672]: E1007 14:49:45.891770 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
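The NotReady condition that dominates this stretch of the log always carries the same message: no CNI configuration file in /etc/kubernetes/cni/net.d/. The sketch below is a rough approximation of that readiness gate, assuming only the conventional CNI config file extensions; it is not the actual cri-o/libcni code, just the same check expressed standalone.

```go
// cnicheck.go - sketch of the NetworkReady gate behind the log messages:
// the runtime wants at least one network config file in the CNI conf dir.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // the directory named in the log
	var found []string
	// *.conf, *.conflist and *.json are the usual CNI config extensions
	// (an assumption here, not quoted from cri-o).
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, "glob:", err)
			os.Exit(1)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		// The state the kubelet keeps reporting above: the network plugin
		// (presumably OVN-Kubernetes on this CRC node) has not written its
		// config yet, so the node stays NotReady.
		fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
		return
	}
	fmt.Println("NetworkReady=true, config files:", found)
}
```

Once the network provider drops its config file into that directory, the runtime flips NetworkReady to true and the heartbeat loop below stops reporting KubeletNotReady.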
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.985455 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.985497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.985508 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.985521 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:45 crc kubenswrapper[4672]: I1007 14:49:45.985532 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:45Z","lastTransitionTime":"2025-10-07T14:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.088474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.088521 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.088530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.088543 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.088552 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.191787 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.191855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.191872 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.191898 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.191917 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.294916 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.294965 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.294975 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.294993 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.295006 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.397575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.397626 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.397640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.397659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.397670 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.500850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.500888 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.500901 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.500919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.500932 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.603647 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.603679 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.603687 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.603699 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.603710 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.708174 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.708251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.708275 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.708309 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.708335 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.812530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.813103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.813128 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.813160 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.813179 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.891457 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:46 crc kubenswrapper[4672]: E1007 14:49:46.891692 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.892073 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:46 crc kubenswrapper[4672]: E1007 14:49:46.892300 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.892410 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:46 crc kubenswrapper[4672]: E1007 14:49:46.892615 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
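Note the contrast with the host-network static pods earlier in the log (the kube-scheduler containers are Running while these pods are skipped): only pods that need the cluster network are held back by the NetworkReady gate. The sketch below is a simplified, hypothetical rendering of that per-pod decision; Pod and canSyncPod are illustrative stand-ins, not kubelet types.

```go
// syncgate.go - simplified sketch (paraphrases, does not copy, the kubelet
// behavior behind "Error syncing pod, skipping ... network is not ready"):
// pods on the cluster network wait for NetworkReady; host-network pods run.
package main

import (
	"errors"
	"fmt"
)

// Pod is a hypothetical stand-in for the kubelet's pod object.
type Pod struct {
	Name        string
	HostNetwork bool
}

var errNetworkNotReady = errors.New(
	"network is not ready: container runtime network not ready: NetworkReady=false")

func canSyncPod(pod Pod, networkReady bool) error {
	if !networkReady && !pod.HostNetwork {
		return errNetworkNotReady // logged, and the pod sync is skipped
	}
	return nil
}

func main() {
	pods := []Pod{
		{Name: "openshift-multus/network-metrics-daemon-mfxdl", HostNetwork: false},
		{Name: "openshift-kube-scheduler/openshift-kube-scheduler-crc", HostNetwork: true},
	}
	for _, p := range pods {
		if err := canSyncPod(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.Name)
			continue
		}
		fmt.Printf("syncing pod %q\n", p.Name)
	}
}
```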
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.918168 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.918261 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.918279 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.918861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:46 crc kubenswrapper[4672]: I1007 14:49:46.918904 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:46Z","lastTransitionTime":"2025-10-07T14:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.021296 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.021343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.021356 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.021375 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.021387 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.123764 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.123807 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.123819 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.123836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.123847 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.226943 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.226990 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.227003 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.227056 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.227077 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.330398 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.330440 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.330452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.330471 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.330486 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.433112 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.433161 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.433178 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.433197 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.433212 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.535556 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.535607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.535617 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.535633 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.535643 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.637811 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.637856 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.637867 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.637885 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.637899 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.741201 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.741242 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.741252 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.741269 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.741282 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.843702 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.843735 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.843745 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.843758 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.843766 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.891366 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:47 crc kubenswrapper[4672]: E1007 14:49:47.891526 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.946109 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.946154 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.946167 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.946186 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:47 crc kubenswrapper[4672]: I1007 14:49:47.946198 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:47Z","lastTransitionTime":"2025-10-07T14:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.047971 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.047999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.048007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.048037 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.048053 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:48Z","lastTransitionTime":"2025-10-07T14:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.891206 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.891281 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:48 crc kubenswrapper[4672]: I1007 14:49:48.891206 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:48 crc kubenswrapper[4672]: E1007 14:49:48.891337 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:48 crc kubenswrapper[4672]: E1007 14:49:48.891396 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:48 crc kubenswrapper[4672]: E1007 14:49:48.891464 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:49 crc kubenswrapper[4672]: I1007 14:49:49.891065 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:49 crc kubenswrapper[4672]: E1007 14:49:49.891202 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.360407 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.360423 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.360435 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.373355 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:50Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.376459 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.376490 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.376498 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.376512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.376521 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.387274 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:50Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.390317 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.390360 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.390372 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.390390 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.390408 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.402832 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:50Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.402998 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.404285 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.404346 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.404357 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.404374 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.404387 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.506416 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.506457 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.506468 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.506484 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.506535 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.608917 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.608954 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.608964 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.608978 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.608988 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.711293 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.711325 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.711334 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.711346 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.711355 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.814135 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.814185 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.814200 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.814218 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.814231 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.891449 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.891508 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.891659 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.891572 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.892159 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:50 crc kubenswrapper[4672]: E1007 14:49:50.892233 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.895901 4672 scope.go:117] "RemoveContainer" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.917523 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.917571 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.917582 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.917596 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:50 crc kubenswrapper[4672]: I1007 14:49:50.917607 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:50Z","lastTransitionTime":"2025-10-07T14:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.019665 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.019875 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.019893 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.020827 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.020849 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.123994 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.124054 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.124063 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.124078 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.124087 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.207708 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/2.log" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.209834 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.210992 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.226207 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.226239 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.226249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.226264 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.226275 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.233492 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.252734 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.270328 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.300296 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.314764 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.329183 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.329241 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.329252 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.329288 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 
14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.329299 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.332369 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.343990 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.357528 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.370402 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.382881 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.396189 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.407424 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.421393 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.431566 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.431604 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.431617 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.431634 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.431646 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.434497 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.443797 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.458195 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.478804 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc5
5205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:51Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.533542 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.533575 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.533583 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.533596 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.533606 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.636581 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.636620 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.636633 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.636649 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.636662 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.739299 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.739343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.739354 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.739369 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.739383 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.841162 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.841205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.841213 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.841231 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.841242 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.891680 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:51 crc kubenswrapper[4672]: E1007 14:49:51.891816 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.943393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.943432 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.943442 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.943457 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:51 crc kubenswrapper[4672]: I1007 14:49:51.943469 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:51Z","lastTransitionTime":"2025-10-07T14:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.045948 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.045991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.045999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.046034 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.046044 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.148537 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.148590 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.148599 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.148614 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.148625 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.214244 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/0.log" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.214342 4672 generic.go:334] "Generic (PLEG): container finished" podID="a0a0e29e-f4b1-4573-b5a7-3dc297f92a62" containerID="04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b" exitCode=1 Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.214404 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerDied","Data":"04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.214781 4672 scope.go:117] "RemoveContainer" containerID="04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.215942 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/3.log" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.216551 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/2.log" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.219528 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" exitCode=1 Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.219629 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.221161 4672 scope.go:117] "RemoveContainer" containerID="fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.221219 4672 scope.go:117] "RemoveContainer" 
containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 14:49:52 crc kubenswrapper[4672]: E1007 14:49:52.221759 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.237220 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.251771 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.253741 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.253807 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.253850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.254009 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.254043 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.261940 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.274894 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.286535 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.296560 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.308690 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.328356 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.341234 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.353233 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.356635 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.356668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.356677 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.356691 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.356699 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.367840 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.381297 4672 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.393034 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.402814 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.414410 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96c
fa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.423245 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.434034 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.447163 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.459366 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.459407 4672 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.459418 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.459433 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.459443 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.461356 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.472323 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.487229 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.501807 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:
44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.511665 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.522418 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.535234 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.546437 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.558753 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.561417 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.561445 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.561452 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.561465 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.561476 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.570267 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.581714 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.593971 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.605632 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.615618 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.626795 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.642965 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc642309df356787a8723732bd649a1c4e1f0088d9e755d03176f9afabdbd36c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:27Z\\\",\\\"message\\\":\\\"19615025667110816) with []\\\\nI1007 14:49:27.706596 6371 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1007 14:49:27.706682 6371 factory.go:1336] Added *v1.Node event handler 7\\\\nI1007 14:49:27.706736 6371 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1007 14:49:27.707086 6371 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707181 6371 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1007 14:49:27.707214 6371 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 14:49:27.707223 6371 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 14:49:27.707235 6371 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 14:49:27.707256 6371 factory.go:656] Stopping watch factory\\\\nI1007 14:49:27.707272 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1007 14:49:27.707299 6371 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 14:49:27.707308 6371 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 14:49:27.707313 6371 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 14:49:27.707324 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 14:49:27.707399 6371 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e 
Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountP
ath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:52Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.663386 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.663418 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.663427 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.663441 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.663451 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.766108 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.766153 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.766166 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.766184 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.766195 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.868631 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.868660 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.868668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.868686 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.868695 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.891638 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.891669 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:52 crc kubenswrapper[4672]: E1007 14:49:52.891806 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.891698 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:52 crc kubenswrapper[4672]: E1007 14:49:52.891983 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:52 crc kubenswrapper[4672]: E1007 14:49:52.892062 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.970657 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.970695 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.970706 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.970721 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:52 crc kubenswrapper[4672]: I1007 14:49:52.970729 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:52Z","lastTransitionTime":"2025-10-07T14:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.073454 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.073488 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.073497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.073512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.073523 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.175522 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.175562 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.175571 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.175587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.175597 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.228741 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/3.log" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.233616 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/0.log" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.233673 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerStarted","Data":"a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.234272 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 14:49:53 crc kubenswrapper[4672]: E1007 14:49:53.234530 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.243153 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.251824 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.262188 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.272384 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.277547 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.277597 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.277610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.277626 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 
14:49:53.277638 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.283557 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.295093 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.312872 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.324910 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.337052 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.347202 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.360263 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.370914 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380501 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380685 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380560 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380776 4672 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380953 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.380965 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.393612 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"202
5-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.406329 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6
d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.416307 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.425542 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.435674 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.443512 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.451733 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.462063 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.472466 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.482715 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.483578 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.483693 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.483706 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.483719 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.483731 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.495145 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.504894 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.514028 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.525069 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.541882 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.553422 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.563078 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.576624 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.585622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.585649 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc 
kubenswrapper[4672]: I1007 14:49:53.585659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.585694 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.585703 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.589072 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.599847 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.609061 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.687346 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.687393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.687404 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.687419 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.687430 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.789633 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.789710 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.789719 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.789734 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.789743 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.890994 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:53 crc kubenswrapper[4672]: E1007 14:49:53.891259 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.892191 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.892219 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.892226 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.892241 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.892250 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.909455 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 
14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.923109 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.933284 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.946775 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.959923 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.972183 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.984138 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.993833 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.993869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.993891 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.993911 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.993921 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:53Z","lastTransitionTime":"2025-10-07T14:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:53 crc kubenswrapper[4672]: I1007 14:49:53.997078 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:53Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.007805 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.015471 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.027497 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.044734 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.056634 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.066812 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.079847 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.092496 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 
14:49:54.095773 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.095804 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.095812 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.095826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.095835 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.104179 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:49:54Z is after 2025-08-24T17:21:41Z" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.197550 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.197581 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.197591 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.197607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.197615 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.299592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.299627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.299637 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.299653 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.299664 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.710720 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.710950 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.711010 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.711140 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.711181 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.813455 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.813504 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.813542 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.813565 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.813577 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.890776 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.890834 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.890891 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:54 crc kubenswrapper[4672]: E1007 14:49:54.890927 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:54 crc kubenswrapper[4672]: E1007 14:49:54.891075 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:54 crc kubenswrapper[4672]: E1007 14:49:54.891203 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.915714 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.915763 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.915795 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.915808 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:54 crc kubenswrapper[4672]: I1007 14:49:54.915817 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:54Z","lastTransitionTime":"2025-10-07T14:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.017888 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.017918 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.017928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.017940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.017948 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:55Z","lastTransitionTime":"2025-10-07T14:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.732200 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.732242 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.732251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.732267 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.732277 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:55Z","lastTransitionTime":"2025-10-07T14:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.834509 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.834546 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.834555 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.834570 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.834580 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:55Z","lastTransitionTime":"2025-10-07T14:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.890881 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:55 crc kubenswrapper[4672]: E1007 14:49:55.891055 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.937123 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.937160 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.937172 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.937187 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:55 crc kubenswrapper[4672]: I1007 14:49:55.937198 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:55Z","lastTransitionTime":"2025-10-07T14:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.039976 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.040007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.040036 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.040051 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.040061 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.142474 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.142512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.142523 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.142537 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.142574 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.244182 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.244212 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.244221 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.244233 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.244244 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.346927 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.347048 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.347058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.347121 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.347142 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.450541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.450600 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.450611 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.450628 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.450642 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.553354 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.553416 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.553426 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.553440 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.553449 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.656054 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.656126 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.656138 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.656158 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.656170 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.758193 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.758239 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.758253 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.758269 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.758279 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.860862 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.860897 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.860905 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.860921 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.860930 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.891435 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.891491 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:49:56 crc kubenswrapper[4672]: E1007 14:49:56.891563 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.891439 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:49:56 crc kubenswrapper[4672]: E1007 14:49:56.891628 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:49:56 crc kubenswrapper[4672]: E1007 14:49:56.891716 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.963671 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.963710 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.963721 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.963744 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:56 crc kubenswrapper[4672]: I1007 14:49:56.963759 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:56Z","lastTransitionTime":"2025-10-07T14:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.066414 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.066454 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.066467 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.066485 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.066495 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.168088 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.168134 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.168147 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.168161 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.168170 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.270465 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.270493 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.270503 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.270518 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.270530 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.372989 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.373058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.373072 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.373089 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.373097 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.475613 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.475654 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.475663 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.475679 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.475688 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.577781 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.577817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.577825 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.577838 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.577847 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.680110 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.680229 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.680242 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.680258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.680268 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.782550 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.782597 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.782610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.782667 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.782682 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.884991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.885083 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.885104 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.885125 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.885176 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:49:57Z","lastTransitionTime":"2025-10-07T14:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:49:57 crc kubenswrapper[4672]: I1007 14:49:57.895404 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:49:57 crc kubenswrapper[4672]: E1007 14:49:57.895827 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
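[annotation, not part of the captured log: every failure above has the same root cause — the container runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. On OpenShift/CRC that file is normally written by the cluster network provider itself (Multus and the default network plugin) once its pods start, so this state usually clears on its own during bootstrap. Purely as an illustrative sketch of what the kubelet is waiting for, a minimal single-plugin CNI config of the kind a provider drops into that directory could look like the following; the network name, bridge name, and subnet are assumptions for illustration, not values taken from this cluster:

  {
    "cniVersion": "0.3.1",
    "name": "examplenet",
    "type": "bridge",
    "bridge": "cni0",
    "isGateway": true,
    "ipMasq": true,
    "ipam": {
      "type": "host-local",
      "subnet": "10.88.0.0/16",
      "routes": [ { "dst": "0.0.0.0/0" } ]
    }
  }

Once a valid configuration appears in that directory, the runtime should report NetworkReady=true and the NodeNotReady / "Error syncing pod" entries should stop recurring.]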
Oct 07 14:49:58 crc kubenswrapper[4672]: I1007 14:49:58.890745 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:49:58 crc kubenswrapper[4672]: I1007 14:49:58.890769 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:49:58 crc kubenswrapper[4672]: I1007 14:49:58.890805 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:49:58 crc kubenswrapper[4672]: E1007 14:49:58.890864 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:49:58 crc kubenswrapper[4672]: E1007 14:49:58.890938 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:49:58 crc kubenswrapper[4672]: E1007 14:49:58.891040 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:49:59 crc kubenswrapper[4672]: I1007 14:49:59.891661 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:49:59 crc kubenswrapper[4672]: E1007 14:49:59.891848 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.449856 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.449894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.449904 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.449920 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.449932 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.552190 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.552237 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.552247 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.552264 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.552274 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.654159 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.654210 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.654229 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.654243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.654254 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.756411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.756442 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.756450 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.756464 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.756474 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.795103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.795151 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.795162 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.795182 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.795194 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.808224 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:00Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.814817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.814866 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.814879 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.814896 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.814910 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.828836 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:00Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.833787 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.833845 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.833856 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.833874 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.833888 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.847214 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:00Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.852377 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.852439 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.852459 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.852488 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.852507 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.866940 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:00Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.871206 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.871251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.871264 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.871283 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.871296 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.882226 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:00Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.882343 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.883806 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.883836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.883845 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.883860 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.883871 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.891257 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.891289 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.891324 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.891397 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.891480 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:00 crc kubenswrapper[4672]: E1007 14:50:00.891571 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.985924 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.985962 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.985972 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.985987 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:00 crc kubenswrapper[4672]: I1007 14:50:00.985998 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:00Z","lastTransitionTime":"2025-10-07T14:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.088200 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.088236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.088247 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.088262 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.088272 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.190031 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.190075 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.190084 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.190300 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.190309 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.292887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.292928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.292941 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.292957 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.292967 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.395332 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.395366 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.395376 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.395391 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.395402 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.497276 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.497319 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.497332 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.497345 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.497353 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.599640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.599684 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.599697 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.599710 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.599718 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.702399 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.702435 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.702445 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.702459 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.702473 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.805079 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.805138 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.805148 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.805160 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.805171 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.891343 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:01 crc kubenswrapper[4672]: E1007 14:50:01.891478 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.908228 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.908270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.908281 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.908297 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:01 crc kubenswrapper[4672]: I1007 14:50:01.908336 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:01Z","lastTransitionTime":"2025-10-07T14:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.011208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.011309 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.011325 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.011354 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.011373 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.113343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.113382 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.113393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.113409 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.113421 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.215725 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.215771 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.215788 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.215803 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.215813 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.318189 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.318225 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.318235 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.318251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.318261 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.420536 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.420579 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.420588 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.420607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.420616 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.523328 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.523375 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.523386 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.523750 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.523781 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.626605 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.626643 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.626660 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.626677 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.626690 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.729366 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.729408 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.729418 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.729432 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.729442 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.831179 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.831243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.831254 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.831269 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.831279 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.891799 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.891799 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:02 crc kubenswrapper[4672]: E1007 14:50:02.892107 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.891844 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:02 crc kubenswrapper[4672]: E1007 14:50:02.892238 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:02 crc kubenswrapper[4672]: E1007 14:50:02.892335 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.934339 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.934383 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.934393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.934407 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:02 crc kubenswrapper[4672]: I1007 14:50:02.934418 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:02Z","lastTransitionTime":"2025-10-07T14:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.037318 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.037362 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.037374 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.037393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.037405 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.139842 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.139890 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.139904 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.139919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.139929 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.243152 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.243202 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.243214 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.243232 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.243246 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.345647 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.345729 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.345754 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.345786 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.345808 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.448539 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.448571 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.448579 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.448592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.448602 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.550640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.550671 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.550679 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.550691 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.550699 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.652871 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.652905 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.652913 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.652927 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.652939 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.755404 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.755439 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.755448 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.755460 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.755471 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.857496 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.857532 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.857540 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.857554 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.857564 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:03Z","lastTransitionTime":"2025-10-07T14:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:03 crc kubenswrapper[4672]: I1007 14:50:03.891587 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:03 crc kubenswrapper[4672]: E1007 14:50:03.891821 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.010531 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.010576 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.010586 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.010601 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.010610 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.014490 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.027435 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.040204 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.052607 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.062891 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.076763 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.089161 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.100077 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.112323 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.113610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.113647 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.113657 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.113675 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.113689 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.131763 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod 
openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.148244 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.163324 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.175105 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.198092 4672 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.216224 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.216271 4672 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.216284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.216304 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.216319 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.234772 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.251972 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.264422 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:04Z is after 2025-08-24T17:21:41Z" Oct 07 
14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.318540 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.318593 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.318605 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.318622 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.318634 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.421307 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.421344 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.421353 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.421368 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.421379 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.523431 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.523498 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.523509 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.523522 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.523531 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.626162 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.626201 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.626210 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.626224 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.626237 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.728204 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.728247 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.728255 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.728270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.728280 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.830606 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.830674 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.830689 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.830709 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.830724 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.890742 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.890767 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.890843 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 14:50:04 crc kubenswrapper[4672]: E1007 14:50:04.890956 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 14:50:04 crc kubenswrapper[4672]: E1007 14:50:04.891055 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 14:50:04 crc kubenswrapper[4672]: E1007 14:50:04.891174 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.901160 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.932881 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.932913 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.932921 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.932940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:04 crc kubenswrapper[4672]: I1007 14:50:04.932949 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:04Z","lastTransitionTime":"2025-10-07T14:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.035278 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.035327 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.035339 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.035355 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.035367 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.137959 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.137999 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.138043 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.138066 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.138093 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.239846 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.239895 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.239907 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.239925 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.239935 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.341778 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.341806 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.341815 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.341828 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.341836 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.444124 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.444168 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.444179 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.444195 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.444204 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.545893 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.545920 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.545928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.545940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.545947 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.648243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.648270 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.648289 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.648302 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.648310 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.750542 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.750583 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.750592 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.750608 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.750617 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.853490 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.853536 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.853546 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.853560 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.853570 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.891010 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.892601 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"
Oct 07 14:50:05 crc kubenswrapper[4672]: E1007 14:50:05.892839 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973"
Oct 07 14:50:05 crc kubenswrapper[4672]: E1007 14:50:05.895001 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.955861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.955938 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.955950 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.955967 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:05 crc kubenswrapper[4672]: I1007 14:50:05.955977 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:05Z","lastTransitionTime":"2025-10-07T14:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.058614 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.058663 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.058671 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.058685 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.058693 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.161177 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.161204 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.161212 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.161224 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.161233 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.264259 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.264304 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.264316 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.264331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.264346 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.366477 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.366507 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.366515 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.366527 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.366537 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.468859 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.468894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.468906 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.468918 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.468927 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.570476 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.570507 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.570516 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.570530 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.570539 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.673350 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.673580 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.673588 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.673604 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.673613 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.775826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.775878 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.775891 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.775909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.775924 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.858869 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.858980 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.859007 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.859080 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.859100 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859220 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859241 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859253 4672 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859247 4672 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859307 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.859272699 +0000 UTC m=+147.834451300 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859269 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859339 4672 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859346 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.85933176 +0000 UTC m=+147.834510471 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859358 4672 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859440 4672 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859382 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.859369651 +0000 UTC m=+147.834548342 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859547 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-07 14:51:10.859506935 +0000 UTC m=+147.834685536 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.859573 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.859560897 +0000 UTC m=+147.834739598 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.878074 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.878136 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.878147 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.878163 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.878174 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.891431 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.891468 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.891603 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.891794 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.891897 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:06 crc kubenswrapper[4672]: E1007 14:50:06.891950 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.980861 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.980898 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.980907 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.980919 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:06 crc kubenswrapper[4672]: I1007 14:50:06.980928 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:06Z","lastTransitionTime":"2025-10-07T14:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.083101 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.083136 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.083145 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.083160 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.083172 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.185168 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.185216 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.185226 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.185243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.185255 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.287356 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.287403 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.287417 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.287436 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.287451 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.389852 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.389886 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.389895 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.389908 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.389918 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.492934 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.492973 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.492985 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.493001 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.493031 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.595319 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.595363 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.595378 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.595393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.595404 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.697815 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.697872 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.697885 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.697900 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.697911 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.799568 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.799606 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.799618 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.799635 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.799647 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.891108 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:07 crc kubenswrapper[4672]: E1007 14:50:07.891278 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.901847 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.901894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.901906 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.901921 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.901932 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:07Z","lastTransitionTime":"2025-10-07T14:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:07 crc kubenswrapper[4672]: I1007 14:50:07.968718 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:07 crc kubenswrapper[4672]: E1007 14:50:07.968876 4672 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:50:07 crc kubenswrapper[4672]: E1007 14:50:07.968965 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs podName:3bab8ff7-6484-479d-9423-0ce0c8f7beff nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.968943292 +0000 UTC m=+148.944121893 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs") pod "network-metrics-daemon-mfxdl" (UID: "3bab8ff7-6484-479d-9423-0ce0c8f7beff") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.004343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.004411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.004420 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.004433 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.004442 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.106806 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.106853 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.106869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.106886 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.106896 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.208950 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.208991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.209005 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.209051 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.209065 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.310928 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.310959 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.310968 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.310981 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.310992 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.415497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.415532 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.415543 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.415559 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.415570 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.518000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.518057 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.518070 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.518085 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.518097 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.621901 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.621946 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.621959 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.621975 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.621986 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.724708 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.724757 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.724770 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.724786 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.724799 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.826481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.826528 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.826542 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.826558 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.826568 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.891558 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.891659 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:08 crc kubenswrapper[4672]: E1007 14:50:08.891766 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.891773 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:08 crc kubenswrapper[4672]: E1007 14:50:08.891865 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:08 crc kubenswrapper[4672]: E1007 14:50:08.892076 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.928553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.928587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.928595 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.928608 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:08 crc kubenswrapper[4672]: I1007 14:50:08.928617 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:08Z","lastTransitionTime":"2025-10-07T14:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.031862 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.031901 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.031910 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.031922 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.031931 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
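All of these sandbox-creation attempts are blocked by the one root cause named in the Ready condition: /etc/kubernetes/cni/net.d/ holds no CNI network configuration, so the runtime reports NetworkReady=false and the kubelet skips syncing any pod that needs the pod network. A quick illustrative check of that directory, assuming the conventional CNI layout of *.conf/*.conflist/*.json files with name/cniVersion fields (the path is the one named in the log message; field handling is a sketch, not kubelet code):

    # check_cni_confdir.py -- illustrative sketch assuming the conventional
    # CNI configuration layout; run on the affected node.
    import json
    import os

    CONF_DIR = '/etc/kubernetes/cni/net.d'

    if not os.path.isdir(CONF_DIR):
        print(CONF_DIR, 'does not exist')
    else:
        entries = [e for e in sorted(os.listdir(CONF_DIR))
                   if e.endswith(('.conf', '.conflist', '.json'))]
        if not entries:
            print(CONF_DIR, 'is empty -- matches the NetworkPluginNotReady error')
        for name in entries:
            with open(os.path.join(CONF_DIR, name)) as f:
                cfg = json.load(f)
            kind = 'conflist' if 'plugins' in cfg else 'conf'
            print('%s: %s, name=%s, cniVersion=%s'
                  % (name, kind, cfg.get('name'), cfg.get('cniVersion')))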
Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.134640 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.134689 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.134704 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.134720 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.134731 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.238317 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.238379 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.238392 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.238409 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.238422 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.341229 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.341380 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.341403 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.341439 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.341462 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.444300 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.444357 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.444369 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.444391 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.444405 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.548350 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.548402 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.548415 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.548434 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.548448 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.651721 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.651775 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.651790 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.651806 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.651818 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.754295 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.754411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.754423 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.754439 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.754453 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.856953 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.857008 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.857054 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.857077 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.857094 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.891555 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:09 crc kubenswrapper[4672]: E1007 14:50:09.891701 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.959192 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.959248 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.959265 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.959287 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:09 crc kubenswrapper[4672]: I1007 14:50:09.959302 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:09Z","lastTransitionTime":"2025-10-07T14:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.061320 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.061384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.061395 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.061430 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.061445 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.163457 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.163488 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.163497 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.163512 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.163522 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.265441 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.265517 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.265528 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.265541 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.265549 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.367989 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.368047 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.368065 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.368114 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.368136 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.470338 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.470383 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.470393 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.470410 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.470421 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.573139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.573187 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.573199 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.573216 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.573228 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.675725 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.675771 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.675780 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.675794 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.675810 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.777888 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.777940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.777952 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.777972 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.777986 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.880807 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.880887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.880906 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.880926 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.880942 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.891389 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.891556 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:10 crc kubenswrapper[4672]: E1007 14:50:10.891632 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.891719 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:10 crc kubenswrapper[4672]: E1007 14:50:10.891849 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:10 crc kubenswrapper[4672]: E1007 14:50:10.891936 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.984899 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.984968 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.984984 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.985006 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:10 crc kubenswrapper[4672]: I1007 14:50:10.985045 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:10Z","lastTransitionTime":"2025-10-07T14:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.088708 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.088776 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.088793 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.088815 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.088829 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.102100 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.102187 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.102205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.102235 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.102258 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.120310 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.124997 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.125096 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.125116 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.125142 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.125160 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.141459 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.147991 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.148068 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.148086 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.148106 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.148117 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.162098 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.167161 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.167226 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.167242 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.167266 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.167278 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.182381 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.187129 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.187175 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.187184 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.187202 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.187217 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.200306 4672 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T14:50:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa7996fa-f726-455b-ba8a-95e8a3b764cf\\\",\\\"systemUUID\\\":\\\"e258f24a-a647-4fb0-b924-1d4075da0e45\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:11Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.200523 4672 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.202644 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.202684 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.202696 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.202718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.202730 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.304762 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.304806 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.304817 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.304832 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.304841 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.407612 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.407678 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.407690 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.407711 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.407726 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.510970 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.511260 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.511281 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.511305 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.511317 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.613866 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.613907 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.613917 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.613932 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.613940 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.716576 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.716641 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.716656 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.716677 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.716694 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.819554 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.819610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.819621 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.819637 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.819649 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.891292 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:11 crc kubenswrapper[4672]: E1007 14:50:11.891449 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.921937 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.921983 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.922000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.922038 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:11 crc kubenswrapper[4672]: I1007 14:50:11.922049 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:11Z","lastTransitionTime":"2025-10-07T14:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.024963 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.025064 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.025080 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.025104 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.025120 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.127598 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.127637 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.127653 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.127669 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.127682 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.231135 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.231208 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.231224 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.231256 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.231276 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.333865 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.333924 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.333940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.333968 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.333987 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.438770 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.438829 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.438857 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.438882 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.438897 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.542948 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.543058 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.543079 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.543110 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.543131 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.646886 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.646972 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.646998 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.647072 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.647095 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.750193 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.750279 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.750299 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.750327 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.750349 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.854577 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.854672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.854699 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.854728 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.854751 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.891648 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.891767 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.892429 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:12 crc kubenswrapper[4672]: E1007 14:50:12.893088 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:12 crc kubenswrapper[4672]: E1007 14:50:12.893429 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:12 crc kubenswrapper[4672]: E1007 14:50:12.893532 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.957473 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.957504 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.957514 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.957533 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:12 crc kubenswrapper[4672]: I1007 14:50:12.957551 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:12Z","lastTransitionTime":"2025-10-07T14:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.062470 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.062531 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.062556 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.062582 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.062598 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.167360 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.167423 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.167433 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.167458 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.167471 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.271814 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.271911 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.271939 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.271979 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.272004 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.375598 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.375648 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.375659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.375681 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.375697 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.478976 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.479049 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.479061 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.479077 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.479086 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.581498 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.581563 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.581578 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.581600 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.581612 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.685570 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.686548 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.686629 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.686723 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.686755 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.789211 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.789249 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.789258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.789271 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.789280 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.890935 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:13 crc kubenswrapper[4672]: E1007 14:50:13.891178 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.893321 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.893379 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.893390 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.893411 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.893422 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.914894 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.931234 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lk2x7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"42ada921-288b-41fa-a167-6c9b5b8be19c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6716aafa8c13128752271b5fdd42154222d5a4f2380cdd497e3a314baebe352a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gtk9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lk2x7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.954837 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l8k8z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"2025-10-07T14:49:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176\\\\n2025-10-07T14:49:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_08e644c8-1063-4d17-a5f4-2c4321d18176 to /host/opt/cni/bin/\\\\n2025-10-07T14:49:06Z [verbose] multus-daemon started\\\\n2025-10-07T14:49:06Z [verbose] Readiness Indicator file check\\\\n2025-10-07T14:49:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zgg9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l8k8z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.975390 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be16af59-c2d0-4922-803f-bf1544dd0973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T14:49:51Z\\\",\\\"message\\\":\\\"37 6678 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1007 14:49:51.706919 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1007 14:49:51.706945 6678 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1007 14:49:51.706967 6678 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706961 6678 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1007 14:49:51.706979 6678 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-l8k8z\\\\nI1007 14:49:51.706984 6678 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-l8k8z in node crc\\\\nI1007 14:49:51.706989 6678 obj_retry.go:386] Retry successful for *v1.Pod openshift-mult\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sps29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bqr7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.995061 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c50f64d3a29c8a9fd6fc4a67677c04f85bb2fd6bb6ee4267d2ef07bb8042ce4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:13Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.997065 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.997099 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.997109 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.997128 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:13 crc kubenswrapper[4672]: I1007 14:50:13.997139 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:13Z","lastTransitionTime":"2025-10-07T14:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.010673 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"492d7244-71b2-4f06-bb99-2f4069a8198c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b219e63f2993b4543e7e51b67dbc42cb7c09221b1e56f9554b138bc251347d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lk58h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mklmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.067846 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35368809-f456-4e20-8b5f-25442aca1cac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b315a59cb67d08f3938432d1c00f39ead14ea61630535f220ec21196f4aadc14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f5dd0f7434adeebc388b68207bf7ad26b1e9252f22ece4396a873475d2f50e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57c8d44bd14a3b5941c305715c50f5a4268db0fe9d6ee03a7a608fefb945ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://900919d83aa55c5b5926c61576a567c46c0d89b865e335cc72721cbd12167889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55b6a3dd39b1da9a90dd76ea90f32a40f1b95942e93caea0069eb4ef3f89516e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f58d5149d362a0c58fd016dd8da7193ee35e1309c15909b3e6fd8b77de4080c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7491046c6a89510a01bed95973384ada441dd5586e39e5005fe93f40a2021381\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:49:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld2v5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7n8j7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.081903 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b189993cbb9b00d2e4e717eb354765a3b0c5ff7128ba575c3c5747d99e5731f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cd02d67b54813435c78641c5ff495816bacb80576c07d4ecfeea8e7041388e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.094232 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.099399 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.099427 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.099435 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.099468 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.099478 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.107188 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b415edcf-67a9-406e-9158-263831ea1b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://496ec330045b4a69961c7970d55a6ab86702ca0d54386e9e458ffccbef24f3e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ec772227c0c8ec302db819e650a7a29b4ece4e6498c9838d3dda1f7145318fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqb5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bng2p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.122567 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f9be2db-248e-4d08-8ce9-32fe8472c6ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3989de2077b714e1ed0348084852b02a3a5f3ade470d795eee55a2cec5522c56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96c28756d230e71b074ae673b32c1220423cdf57d15c096b823a5f240efa282f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f368bf313ddd36290a6d94dac979b51094e98978221d13124f15c6e6b04e6534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40667ea3c6665488ac4ac9885039be1d08c1b5fe96cfa41925ffa5ca2cf1d358\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d22dd5feb9728913b8187bf86a5d10bafa667d9d0f7f13ee554f233738fbd1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8675aab31f2332f98fe56e10899d7b758ec7db38f948646801a62707543c6f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.132429 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zgwqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17547458-b00b-4f76-8399-374b637285f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://396b812e80b47bafe2fe848931a22ddc440792bec6babbfd5ebcb5ec3c02cbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-flv77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zgwqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.142552 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93c9f521-38f1-422e-a980-a3e7b59e2187\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00412e51885fbbe35c0df6dcdc7be02cb133c4c1461ad7c2ca9d3ca37ebdf376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af7163d2d5c549866eaecf3ec52ff4100e23ac6ed463d434e86d10800c43e1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab8d1b4d4e4c456661f65e4b9e521afb80738766f9a0e33ee13902c3e9f76c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab79362187e2d079cd7a264481c318dda7e69b7ac9fa34f90ac04cf4315460ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.154635 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.165576 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02601ddfe7661b228ff381c10a7f91e02f98a2dd7a293d092aecc07997108897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:49:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.175721 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bab8ff7-6484-479d-9423-0ce0c8f7beff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8j4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:49:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mfxdl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.186344 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f606600d-a8a0-41c4-9e2d-7385da2ebed0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5cb78016f9669053f8fa1c0bdb04a9b9b75858ca3b2ed0014a7ae7e387b6d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4427192faf97eb1a9d0f23f1128c244733f94622d34ee70e4a3d5829cbba7d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4427192faf97eb1a9d0f23f1128c244733f94622d34ee70e4a3d5829cbba7d67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201436 4672 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0617677c-252c-4f98-8187-3732e3bae5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:49:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T14:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9252660d2e8a23b081489a724d89cc9aca375eaab096ec18bc8a17e7e487d1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a41375d39097cf879e60fb5fa514a068435caf35f603b5a495a2337e4a485f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd2e211d9d273f7933326da443dc35952fb5ac5cad0462023909593115016b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08ed46c5e578aa81a703ed67843236a0f281e96b7dcbeeb74e768ce686235a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T14:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T14:48:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T14:50:14Z is after 2025-08-24T17:21:41Z" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201549 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201621 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201638 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.201649 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.304511 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.304549 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.304559 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.304574 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.304585 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.407440 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.407481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.407491 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.407508 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.407520 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.509855 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.509897 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.509912 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.509942 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.509953 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.612605 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.612656 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.612667 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.612686 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.612701 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.715754 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.715822 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.715843 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.715869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.715888 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.819894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.820069 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.820097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.820137 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.820161 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.891524 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.891593 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.891524 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:14 crc kubenswrapper[4672]: E1007 14:50:14.891803 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:14 crc kubenswrapper[4672]: E1007 14:50:14.892062 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:14 crc kubenswrapper[4672]: E1007 14:50:14.892140 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.924284 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.924334 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.924343 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.924364 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:14 crc kubenswrapper[4672]: I1007 14:50:14.924377 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:14Z","lastTransitionTime":"2025-10-07T14:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.027755 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.027831 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.027844 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.027860 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.027871 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.130821 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.130878 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.130887 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.130909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.130918 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.233940 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.234050 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.234076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.234103 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.234125 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.336724 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.336777 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.336790 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.336805 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.336817 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.439183 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.439244 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.439268 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.439296 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.439317 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.541608 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.541647 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.541659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.541672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.541681 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.645076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.645137 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.645163 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.645192 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.645215 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.747537 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.747590 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.747607 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.747627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.747677 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.850297 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.850349 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.850363 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.850384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.850399 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.890957 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:15 crc kubenswrapper[4672]: E1007 14:50:15.891317 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.953397 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.953441 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.953450 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.953464 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:15 crc kubenswrapper[4672]: I1007 14:50:15.953473 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:15Z","lastTransitionTime":"2025-10-07T14:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.055414 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.055454 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.055463 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.055476 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.055485 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.157574 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.157634 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.157646 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.157660 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.157670 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.259519 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.259572 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.259585 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.259600 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.259613 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.361481 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.361525 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.361535 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.361550 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.361560 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.463384 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.463425 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.463437 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.463451 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.463460 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.565419 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.565451 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.565469 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.565487 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.565498 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.667922 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.667973 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.667988 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.668006 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.668050 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.770450 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.770486 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.770494 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.770507 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.770519 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.872566 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.872598 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.872606 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.872618 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.872628 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.891342 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.891342 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:16 crc kubenswrapper[4672]: E1007 14:50:16.891472 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.891350 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:16 crc kubenswrapper[4672]: E1007 14:50:16.891549 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:16 crc kubenswrapper[4672]: E1007 14:50:16.891680 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.903874 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.974271 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.974331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.974348 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.974367 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:16 crc kubenswrapper[4672]: I1007 14:50:16.974378 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:16Z","lastTransitionTime":"2025-10-07T14:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.076508 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.076543 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.076558 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.076577 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.076591 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.178639 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.178672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.178682 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.178698 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.178709 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.283093 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.283130 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.283139 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.283154 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.283164 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.386458 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.386553 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.386568 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.386590 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.386606 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.489686 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.489724 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.489733 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.489745 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.489756 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.592805 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.592852 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.592863 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.592878 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.592887 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.695777 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.695822 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.695833 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.695849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.695864 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.798844 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.798894 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.798903 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.798918 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.798930 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.891853 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:17 crc kubenswrapper[4672]: E1007 14:50:17.892036 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.900718 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.900771 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.900780 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.900798 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:17 crc kubenswrapper[4672]: I1007 14:50:17.900807 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:17Z","lastTransitionTime":"2025-10-07T14:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.002849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.002905 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.002914 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.002929 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.002939 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.105869 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.105920 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.105929 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.105945 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.105956 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.208510 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.208577 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.208591 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.208610 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.208624 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.310992 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.311069 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.311082 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.311097 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.311109 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.414910 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.415044 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.415064 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.415100 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.415124 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.518605 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.518672 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.518687 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.518707 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.518721 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.622757 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.622816 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.622826 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.622849 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.622862 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.726180 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.726225 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.726237 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.726256 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.726269 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.830062 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.830151 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.830164 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.830209 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.830227 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.890934 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.890959 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:18 crc kubenswrapper[4672]: E1007 14:50:18.891107 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.891122 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:18 crc kubenswrapper[4672]: E1007 14:50:18.891434 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:18 crc kubenswrapper[4672]: E1007 14:50:18.891588 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.891821 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 14:50:18 crc kubenswrapper[4672]: E1007 14:50:18.892105 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.932782 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.932833 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.932845 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.932858 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:18 crc kubenswrapper[4672]: I1007 14:50:18.932870 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:18Z","lastTransitionTime":"2025-10-07T14:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.034995 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.035047 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.035057 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.035076 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.035089 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.137835 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.137918 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.137936 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.137961 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.137976 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.239885 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.239926 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.239934 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.239946 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.239985 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.342144 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.342191 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.342205 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.342220 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.342231 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.444280 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.444320 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.444331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.444345 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.444356 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.546593 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.546629 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.546638 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.546651 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.546660 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.650236 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.650325 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.650350 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.650381 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.650407 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.752945 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.753000 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.753010 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.753056 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.753070 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.855010 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.855063 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.855073 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.855087 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.855098 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.891594 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:19 crc kubenswrapper[4672]: E1007 14:50:19.891716 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.957175 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.957228 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.957241 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.957259 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:19 crc kubenswrapper[4672]: I1007 14:50:19.957270 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:19Z","lastTransitionTime":"2025-10-07T14:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.059780 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.059825 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.059836 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.059850 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.059861 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.161587 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.161621 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.161631 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.161643 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.161652 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.264741 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.264796 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.264809 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.264825 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.264838 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.366978 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.367009 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.367043 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.367062 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.367079 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.469285 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.469323 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.469331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.469344 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.469352 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.572084 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.572351 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.572433 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.572496 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.572559 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.675258 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.675290 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.675298 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.675311 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.675321 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.781155 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.781243 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.781341 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.781382 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.781688 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.883751 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.884276 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.884377 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.884487 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.884588 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.891111 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.891135 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:20 crc kubenswrapper[4672]: E1007 14:50:20.891403 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:20 crc kubenswrapper[4672]: E1007 14:50:20.891320 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.891153 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:20 crc kubenswrapper[4672]: E1007 14:50:20.891461 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.987514 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.987552 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.987565 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.987580 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:20 crc kubenswrapper[4672]: I1007 14:50:20.987591 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:20Z","lastTransitionTime":"2025-10-07T14:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.089288 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.089323 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.089331 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.089345 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.089354 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.191627 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.191659 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.191668 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.191680 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.191688 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.293794 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.294106 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.294185 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.294251 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.294331 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.396226 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.396263 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.396274 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.396290 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.396300 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.498618 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.498909 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.498992 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.499138 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.499244 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.568936 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.569007 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.569037 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.569053 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.569064 4672 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T14:50:21Z","lastTransitionTime":"2025-10-07T14:50:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.613436 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz"] Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.613858 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.616059 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.616156 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.616207 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.616423 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.658543 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podStartSLOduration=78.658524415 podStartE2EDuration="1m18.658524415s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.639593091 +0000 UTC m=+98.614771672" watchObservedRunningTime="2025-10-07 14:50:21.658524415 +0000 UTC m=+98.633702986" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.672254 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-7n8j7" podStartSLOduration=78.672236396 podStartE2EDuration="1m18.672236396s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.65836452 +0000 UTC m=+98.633543111" watchObservedRunningTime="2025-10-07 14:50:21.672236396 +0000 UTC m=+98.647414977" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.691885 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-zgwqx" podStartSLOduration=79.69186245 podStartE2EDuration="1m19.69186245s" podCreationTimestamp="2025-10-07 14:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.682654771 +0000 UTC m=+98.657833362" watchObservedRunningTime="2025-10-07 14:50:21.69186245 +0000 UTC m=+98.667041031" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.707873 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.707859608 podStartE2EDuration="1m19.707859608s" podCreationTimestamp="2025-10-07 14:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.707499167 +0000 UTC m=+98.682677768" watchObservedRunningTime="2025-10-07 14:50:21.707859608 +0000 UTC m=+98.683038189" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 
14:50:21.707984 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bng2p" podStartSLOduration=76.707981082 podStartE2EDuration="1m16.707981082s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.692122638 +0000 UTC m=+98.667301219" watchObservedRunningTime="2025-10-07 14:50:21.707981082 +0000 UTC m=+98.683159663" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.732876 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.732864079 podStartE2EDuration="1m12.732864079s" podCreationTimestamp="2025-10-07 14:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.721992101 +0000 UTC m=+98.697170682" watchObservedRunningTime="2025-10-07 14:50:21.732864079 +0000 UTC m=+98.708042660" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.732969 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=44.732966852 podStartE2EDuration="44.732966852s" podCreationTimestamp="2025-10-07 14:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.732621432 +0000 UTC m=+98.707800013" watchObservedRunningTime="2025-10-07 14:50:21.732966852 +0000 UTC m=+98.708145433" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.740087 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.740158 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.740180 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da04be-79ea-4c5a-9309-25cfc208608e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.740196 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75da04be-79ea-4c5a-9309-25cfc208608e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc 
kubenswrapper[4672]: I1007 14:50:21.740224 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75da04be-79ea-4c5a-9309-25cfc208608e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.779077 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.77905853 podStartE2EDuration="17.77905853s" podCreationTimestamp="2025-10-07 14:50:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.77869118 +0000 UTC m=+98.753869791" watchObservedRunningTime="2025-10-07 14:50:21.77905853 +0000 UTC m=+98.754237111" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.813257 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-lk2x7" podStartSLOduration=78.81323739 podStartE2EDuration="1m18.81323739s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.812973052 +0000 UTC m=+98.788151653" watchObservedRunningTime="2025-10-07 14:50:21.81323739 +0000 UTC m=+98.788415971" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.824100 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-l8k8z" podStartSLOduration=78.824084827 podStartE2EDuration="1m18.824084827s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.8231488 +0000 UTC m=+98.798327381" watchObservedRunningTime="2025-10-07 14:50:21.824084827 +0000 UTC m=+98.799263408" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.840828 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.840868 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da04be-79ea-4c5a-9309-25cfc208608e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.840890 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75da04be-79ea-4c5a-9309-25cfc208608e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.840922 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75da04be-79ea-4c5a-9309-25cfc208608e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.840948 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.841301 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.841458 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/75da04be-79ea-4c5a-9309-25cfc208608e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.842297 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75da04be-79ea-4c5a-9309-25cfc208608e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.847623 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da04be-79ea-4c5a-9309-25cfc208608e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.859565 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75da04be-79ea-4c5a-9309-25cfc208608e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvljz\" (UID: \"75da04be-79ea-4c5a-9309-25cfc208608e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.894203 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:21 crc kubenswrapper[4672]: E1007 14:50:21.894365 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:21 crc kubenswrapper[4672]: I1007 14:50:21.927289 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.314744 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" event={"ID":"75da04be-79ea-4c5a-9309-25cfc208608e","Type":"ContainerStarted","Data":"65cdeed5166c81b60512dc157f51bea675c87592d50b417bfa606aa264f04c7b"} Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.314795 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" event={"ID":"75da04be-79ea-4c5a-9309-25cfc208608e","Type":"ContainerStarted","Data":"48b8cc63d6a085c5d051a1945d70bf4170b799ffde448ba0a73399d76d3b8c55"} Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.333462 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=6.333433134 podStartE2EDuration="6.333433134s" podCreationTimestamp="2025-10-07 14:50:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:21.889611983 +0000 UTC m=+98.864790564" watchObservedRunningTime="2025-10-07 14:50:22.333433134 +0000 UTC m=+99.308611715" Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.336641 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvljz" podStartSLOduration=78.336530234 podStartE2EDuration="1m18.336530234s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:22.329607472 +0000 UTC m=+99.304786073" watchObservedRunningTime="2025-10-07 14:50:22.336530234 +0000 UTC m=+99.311708825" Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.891212 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.891273 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:22 crc kubenswrapper[4672]: E1007 14:50:22.891562 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:22 crc kubenswrapper[4672]: E1007 14:50:22.891725 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:22 crc kubenswrapper[4672]: I1007 14:50:22.891857 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:22 crc kubenswrapper[4672]: E1007 14:50:22.891929 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:23 crc kubenswrapper[4672]: I1007 14:50:23.891601 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:23 crc kubenswrapper[4672]: E1007 14:50:23.897756 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:24 crc kubenswrapper[4672]: I1007 14:50:24.891355 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:24 crc kubenswrapper[4672]: E1007 14:50:24.891495 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:24 crc kubenswrapper[4672]: I1007 14:50:24.891748 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:24 crc kubenswrapper[4672]: E1007 14:50:24.891823 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:24 crc kubenswrapper[4672]: I1007 14:50:24.891951 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:24 crc kubenswrapper[4672]: E1007 14:50:24.892007 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:25 crc kubenswrapper[4672]: I1007 14:50:25.891279 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:25 crc kubenswrapper[4672]: E1007 14:50:25.891529 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:26 crc kubenswrapper[4672]: I1007 14:50:26.891498 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:26 crc kubenswrapper[4672]: I1007 14:50:26.891500 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:26 crc kubenswrapper[4672]: E1007 14:50:26.891635 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:26 crc kubenswrapper[4672]: I1007 14:50:26.891520 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:26 crc kubenswrapper[4672]: E1007 14:50:26.891797 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:26 crc kubenswrapper[4672]: E1007 14:50:26.891980 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:27 crc kubenswrapper[4672]: I1007 14:50:27.891879 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:27 crc kubenswrapper[4672]: E1007 14:50:27.891981 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:28 crc kubenswrapper[4672]: I1007 14:50:28.890716 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:28 crc kubenswrapper[4672]: I1007 14:50:28.890716 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:28 crc kubenswrapper[4672]: I1007 14:50:28.890729 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:28 crc kubenswrapper[4672]: E1007 14:50:28.890837 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:28 crc kubenswrapper[4672]: E1007 14:50:28.890961 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:28 crc kubenswrapper[4672]: E1007 14:50:28.891248 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:29 crc kubenswrapper[4672]: I1007 14:50:29.891833 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:29 crc kubenswrapper[4672]: E1007 14:50:29.891980 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:29 crc kubenswrapper[4672]: I1007 14:50:29.893122 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 14:50:29 crc kubenswrapper[4672]: E1007 14:50:29.893362 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bqr7_openshift-ovn-kubernetes(be16af59-c2d0-4922-803f-bf1544dd0973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" Oct 07 14:50:30 crc kubenswrapper[4672]: I1007 14:50:30.891200 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:30 crc kubenswrapper[4672]: I1007 14:50:30.891328 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:30 crc kubenswrapper[4672]: I1007 14:50:30.891332 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:30 crc kubenswrapper[4672]: E1007 14:50:30.891486 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:30 crc kubenswrapper[4672]: E1007 14:50:30.891555 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:30 crc kubenswrapper[4672]: E1007 14:50:30.891642 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:31 crc kubenswrapper[4672]: I1007 14:50:31.891226 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:31 crc kubenswrapper[4672]: E1007 14:50:31.891361 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:32 crc kubenswrapper[4672]: I1007 14:50:32.891661 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:32 crc kubenswrapper[4672]: I1007 14:50:32.891661 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:32 crc kubenswrapper[4672]: E1007 14:50:32.891805 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:32 crc kubenswrapper[4672]: E1007 14:50:32.891943 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:32 crc kubenswrapper[4672]: I1007 14:50:32.891687 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:32 crc kubenswrapper[4672]: E1007 14:50:32.892058 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:33 crc kubenswrapper[4672]: I1007 14:50:33.891393 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:33 crc kubenswrapper[4672]: E1007 14:50:33.892436 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:34 crc kubenswrapper[4672]: I1007 14:50:34.891386 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:34 crc kubenswrapper[4672]: E1007 14:50:34.891710 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:34 crc kubenswrapper[4672]: I1007 14:50:34.891459 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:34 crc kubenswrapper[4672]: I1007 14:50:34.891414 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:34 crc kubenswrapper[4672]: E1007 14:50:34.891769 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:34 crc kubenswrapper[4672]: E1007 14:50:34.891853 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:35 crc kubenswrapper[4672]: I1007 14:50:35.891380 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:35 crc kubenswrapper[4672]: E1007 14:50:35.891527 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:36 crc kubenswrapper[4672]: I1007 14:50:36.890719 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:36 crc kubenswrapper[4672]: E1007 14:50:36.890863 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:36 crc kubenswrapper[4672]: I1007 14:50:36.890887 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:36 crc kubenswrapper[4672]: E1007 14:50:36.890962 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:36 crc kubenswrapper[4672]: I1007 14:50:36.890887 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:36 crc kubenswrapper[4672]: E1007 14:50:36.891169 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:37 crc kubenswrapper[4672]: I1007 14:50:37.891300 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:37 crc kubenswrapper[4672]: E1007 14:50:37.894333 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.367694 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/1.log" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.368260 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/0.log" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.368327 4672 generic.go:334] "Generic (PLEG): container finished" podID="a0a0e29e-f4b1-4573-b5a7-3dc297f92a62" containerID="a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925" exitCode=1 Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.368389 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerDied","Data":"a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925"} Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.368458 4672 scope.go:117] "RemoveContainer" containerID="04c40eb8f7f424ecdcf949e44735bb27d892f8bd4794fd07274b5d63cf12023b" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.369147 4672 scope.go:117] "RemoveContainer" containerID="a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925" Oct 07 14:50:38 crc kubenswrapper[4672]: E1007 14:50:38.369448 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-l8k8z_openshift-multus(a0a0e29e-f4b1-4573-b5a7-3dc297f92a62)\"" pod="openshift-multus/multus-l8k8z" podUID="a0a0e29e-f4b1-4573-b5a7-3dc297f92a62" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.891520 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.891601 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:38 crc kubenswrapper[4672]: I1007 14:50:38.891544 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:38 crc kubenswrapper[4672]: E1007 14:50:38.891722 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:38 crc kubenswrapper[4672]: E1007 14:50:38.891926 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:38 crc kubenswrapper[4672]: E1007 14:50:38.892017 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:39 crc kubenswrapper[4672]: I1007 14:50:39.373282 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/1.log" Oct 07 14:50:39 crc kubenswrapper[4672]: I1007 14:50:39.890806 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:39 crc kubenswrapper[4672]: E1007 14:50:39.890929 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:40 crc kubenswrapper[4672]: I1007 14:50:40.891127 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:40 crc kubenswrapper[4672]: I1007 14:50:40.891210 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:40 crc kubenswrapper[4672]: I1007 14:50:40.891274 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:40 crc kubenswrapper[4672]: E1007 14:50:40.891416 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:40 crc kubenswrapper[4672]: E1007 14:50:40.891651 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:40 crc kubenswrapper[4672]: E1007 14:50:40.891770 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:41 crc kubenswrapper[4672]: I1007 14:50:41.891926 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:41 crc kubenswrapper[4672]: E1007 14:50:41.892492 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:41 crc kubenswrapper[4672]: I1007 14:50:41.892813 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.383737 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/3.log" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.386126 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerStarted","Data":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.386587 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.749381 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podStartSLOduration=99.749361562 podStartE2EDuration="1m39.749361562s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:50:42.42754089 +0000 UTC m=+119.402719501" watchObservedRunningTime="2025-10-07 14:50:42.749361562 +0000 UTC m=+119.724540143" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.750194 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mfxdl"] Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.750274 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:42 crc kubenswrapper[4672]: E1007 14:50:42.750356 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.891261 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:42 crc kubenswrapper[4672]: E1007 14:50:42.891709 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.891496 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:42 crc kubenswrapper[4672]: E1007 14:50:42.891785 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:42 crc kubenswrapper[4672]: I1007 14:50:42.891335 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:42 crc kubenswrapper[4672]: E1007 14:50:42.891832 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:43 crc kubenswrapper[4672]: E1007 14:50:43.923195 4672 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 07 14:50:44 crc kubenswrapper[4672]: E1007 14:50:44.019349 4672 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 14:50:44 crc kubenswrapper[4672]: I1007 14:50:44.890898 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:44 crc kubenswrapper[4672]: I1007 14:50:44.890904 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:44 crc kubenswrapper[4672]: E1007 14:50:44.891117 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:44 crc kubenswrapper[4672]: I1007 14:50:44.890928 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:44 crc kubenswrapper[4672]: I1007 14:50:44.890898 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:44 crc kubenswrapper[4672]: E1007 14:50:44.891282 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:44 crc kubenswrapper[4672]: E1007 14:50:44.891455 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:44 crc kubenswrapper[4672]: E1007 14:50:44.891570 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:46 crc kubenswrapper[4672]: I1007 14:50:46.891731 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:46 crc kubenswrapper[4672]: I1007 14:50:46.891735 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:46 crc kubenswrapper[4672]: E1007 14:50:46.891857 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:46 crc kubenswrapper[4672]: I1007 14:50:46.891760 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:46 crc kubenswrapper[4672]: I1007 14:50:46.891737 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:46 crc kubenswrapper[4672]: E1007 14:50:46.891942 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:46 crc kubenswrapper[4672]: E1007 14:50:46.892073 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:46 crc kubenswrapper[4672]: E1007 14:50:46.892139 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:48 crc kubenswrapper[4672]: I1007 14:50:48.891370 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:48 crc kubenswrapper[4672]: I1007 14:50:48.891417 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:48 crc kubenswrapper[4672]: I1007 14:50:48.891462 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:48 crc kubenswrapper[4672]: I1007 14:50:48.891382 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:48 crc kubenswrapper[4672]: E1007 14:50:48.891520 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:48 crc kubenswrapper[4672]: E1007 14:50:48.891587 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:48 crc kubenswrapper[4672]: E1007 14:50:48.891761 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:48 crc kubenswrapper[4672]: E1007 14:50:48.891832 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:49 crc kubenswrapper[4672]: E1007 14:50:49.021229 4672 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 14:50:50 crc kubenswrapper[4672]: I1007 14:50:50.891698 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:50 crc kubenswrapper[4672]: I1007 14:50:50.891750 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:50 crc kubenswrapper[4672]: E1007 14:50:50.891925 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:50 crc kubenswrapper[4672]: I1007 14:50:50.891767 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:50 crc kubenswrapper[4672]: I1007 14:50:50.891767 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:50 crc kubenswrapper[4672]: E1007 14:50:50.892055 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:50 crc kubenswrapper[4672]: E1007 14:50:50.892049 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:50 crc kubenswrapper[4672]: E1007 14:50:50.892198 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:52 crc kubenswrapper[4672]: I1007 14:50:52.891462 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:52 crc kubenswrapper[4672]: E1007 14:50:52.892537 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:52 crc kubenswrapper[4672]: I1007 14:50:52.891541 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:52 crc kubenswrapper[4672]: I1007 14:50:52.891502 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:52 crc kubenswrapper[4672]: I1007 14:50:52.891879 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:52 crc kubenswrapper[4672]: I1007 14:50:52.891832 4672 scope.go:117] "RemoveContainer" containerID="a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925" Oct 07 14:50:52 crc kubenswrapper[4672]: E1007 14:50:52.892715 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:52 crc kubenswrapper[4672]: E1007 14:50:52.892882 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:52 crc kubenswrapper[4672]: E1007 14:50:52.893089 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:53 crc kubenswrapper[4672]: I1007 14:50:53.435446 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/1.log" Oct 07 14:50:53 crc kubenswrapper[4672]: I1007 14:50:53.435507 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerStarted","Data":"b55b4513fc3f63daea26199ada496c4865e05f40aa4e1fbf2129675e324d5a0c"} Oct 07 14:50:54 crc kubenswrapper[4672]: E1007 14:50:54.022082 4672 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 14:50:54 crc kubenswrapper[4672]: I1007 14:50:54.890805 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:54 crc kubenswrapper[4672]: I1007 14:50:54.890882 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:54 crc kubenswrapper[4672]: I1007 14:50:54.890938 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:54 crc kubenswrapper[4672]: E1007 14:50:54.891076 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:54 crc kubenswrapper[4672]: I1007 14:50:54.891112 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:54 crc kubenswrapper[4672]: E1007 14:50:54.891189 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:54 crc kubenswrapper[4672]: E1007 14:50:54.891286 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:54 crc kubenswrapper[4672]: E1007 14:50:54.891401 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:56 crc kubenswrapper[4672]: I1007 14:50:56.890870 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:56 crc kubenswrapper[4672]: I1007 14:50:56.890918 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:56 crc kubenswrapper[4672]: I1007 14:50:56.890892 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:56 crc kubenswrapper[4672]: E1007 14:50:56.890992 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:56 crc kubenswrapper[4672]: I1007 14:50:56.891003 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:56 crc kubenswrapper[4672]: E1007 14:50:56.891134 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:50:56 crc kubenswrapper[4672]: E1007 14:50:56.891178 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:56 crc kubenswrapper[4672]: E1007 14:50:56.891232 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:58 crc kubenswrapper[4672]: I1007 14:50:58.891192 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:50:58 crc kubenswrapper[4672]: I1007 14:50:58.891214 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:50:58 crc kubenswrapper[4672]: E1007 14:50:58.891453 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 14:50:58 crc kubenswrapper[4672]: I1007 14:50:58.891492 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:50:58 crc kubenswrapper[4672]: I1007 14:50:58.891669 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:50:58 crc kubenswrapper[4672]: E1007 14:50:58.891745 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 14:50:58 crc kubenswrapper[4672]: E1007 14:50:58.891874 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mfxdl" podUID="3bab8ff7-6484-479d-9423-0ce0c8f7beff" Oct 07 14:50:58 crc kubenswrapper[4672]: E1007 14:50:58.892046 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.891442 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.891477 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.891522 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.891450 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901182 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901203 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901371 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901412 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901608 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 07 14:51:00 crc kubenswrapper[4672]: I1007 14:51:00.901654 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.343727 4672 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.379801 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kqvf5"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.380272 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.385394 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.385806 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.386145 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-5547l"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.386477 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.387212 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.387508 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.388082 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.388107 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.388249 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.388956 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.389957 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.391144 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.391399 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.391463 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.391575 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.391985 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.392918 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fqpf"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.393294 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.394557 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.394983 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.395342 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.395423 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.395931 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.396596 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.397652 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398030 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398298 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w45mb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398298 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398454 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.406620 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398605 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398638 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.398673 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.408086 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.411075 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.412458 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.412897 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.413677 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.414604 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.414689 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.414610 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.431492 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432074 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432181 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432302 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432352 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432435 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432466 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432312 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432597 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432723 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432730 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432757 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432896 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432954 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.432888 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.433210 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.434261 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.434826 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.437399 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.442000 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-bwrwj"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.442104 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.442937 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.444048 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.444461 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.444576 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.444738 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.447690 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.448281 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.448323 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.448690 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.449301 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.450153 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.450751 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.450948 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.451494 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.453084 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.453772 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.453804 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.453974 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.454423 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.456295 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.456827 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.456955 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457092 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457208 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457273 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457390 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457433 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457492 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457561 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457577 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457551 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457663 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457689 
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457702 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457770 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457789 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457862 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457898 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457930 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.457942 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458057 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458149 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458161 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458296 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-n8l22"]
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458349 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458458 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd583d73-11c6-4955-b98f-e490f24a239e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458520 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458547 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458567 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42m28\" (UniqueName: \"kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458588 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4rb5\" (UniqueName: \"kubernetes.io/projected/13a87120-7360-414a-8647-2f6e962db2a2-kube-api-access-q4rb5\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458619 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-auth-proxy-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458640 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-trusted-ca\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458662 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458683 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-dir\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458707 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqmwf\" (UniqueName: \"kubernetes.io/projected/2b444f1c-1587-4f49-9235-7313b6284a43-kube-api-access-sqmwf\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm"
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458730 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l"
\"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458750 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz4rx\" (UniqueName: \"kubernetes.io/projected/cd583d73-11c6-4955-b98f-e490f24a239e-kube-api-access-vz4rx\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458774 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2b444f1c-1587-4f49-9235-7313b6284a43-machine-approver-tls\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458793 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458815 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458834 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddcb5\" (UniqueName: \"kubernetes.io/projected/7100c20d-df6c-4382-856e-68e823252e84-kube-api-access-ddcb5\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458854 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458874 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458894 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d7ae761c-0c0b-4949-964e-ac6a76720d41-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458916 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbgvh\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-kube-api-access-dbgvh\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458937 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458958 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7ae761c-0c0b-4949-964e-ac6a76720d41-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458992 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93d4decb-aa9d-40aa-8e02-c6557c64aacb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459014 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55nf2\" (UniqueName: \"kubernetes.io/projected/93d4decb-aa9d-40aa-8e02-c6557c64aacb-kube-api-access-55nf2\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459056 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-encryption-config\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459072 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459079 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459103 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d9039ee1-eaf1-4b12-9849-909d95692fda-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459131 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-policies\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459208 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459235 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbhlw\" (UniqueName: \"kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459256 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459277 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459298 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459318 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/435345a0-9e68-4178-b036-b919092da385-serving-cert\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459351 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-serving-cert\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459374 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-config\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459396 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13a87120-7360-414a-8647-2f6e962db2a2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459418 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lchnl\" (UniqueName: \"kubernetes.io/projected/d9039ee1-eaf1-4b12-9849-909d95692fda-kube-api-access-lchnl\") pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459438 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-config\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459458 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zschd\" (UniqueName: \"kubernetes.io/projected/435345a0-9e68-4178-b036-b919092da385-kube-api-access-zschd\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459480 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4n7m\" (UniqueName: \"kubernetes.io/projected/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-kube-api-access-k4n7m\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: 
\"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459504 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef4ea638-65f8-4a4b-a587-0ac860e0478d-config\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459527 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-config\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459549 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7100c20d-df6c-4382-856e-68e823252e84-serving-cert\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459569 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459590 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-images\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459623 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-client\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458525 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459700 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.458867 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 07 14:51:02 crc kubenswrapper[4672]: 
I1007 14:51:02.459770 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-service-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459790 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459800 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459829 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13a87120-7360-414a-8647-2f6e962db2a2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459840 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459845 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459854 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459881 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef4ea638-65f8-4a4b-a587-0ac860e0478d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459905 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459904 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459930 4672 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459941 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd583d73-11c6-4955-b98f-e490f24a239e-serving-cert\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.459965 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460001 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460036 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460037 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef4ea638-65f8-4a4b-a587-0ac860e0478d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460059 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460089 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv4tf\" (UniqueName: \"kubernetes.io/projected/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-kube-api-access-fv4tf\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460101 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460163 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460168 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460209 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460161 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n8dg\" (UniqueName: 
\"kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460180 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460260 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460333 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.460369 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.461217 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.461708 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.467341 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.467629 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.467871 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468092 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468095 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468286 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468502 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468556 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468752 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.468881 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.469087 4672 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.471474 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.472104 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.476068 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-rx4tl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.503039 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.536163 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.536616 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.536854 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.539158 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.539848 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.540266 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.540515 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.541691 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.542266 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.543402 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.545510 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.545543 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.546151 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.546229 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.546477 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.547060 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.547185 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kqvf5"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.547618 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.548188 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9krvm"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.548890 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.549367 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-62btl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.549496 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.549886 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.550249 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.551758 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.551886 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.552526 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.554333 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.555187 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.555483 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fgbcz"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.555993 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.556847 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.557686 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.557923 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.558368 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.559637 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.560527 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.561247 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.562792 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563360 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563414 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7ae761c-0c0b-4949-964e-ac6a76720d41-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563445 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbgvh\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-kube-api-access-dbgvh\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563475 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563499 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563534 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93d4decb-aa9d-40aa-8e02-c6557c64aacb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563561 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55nf2\" (UniqueName: \"kubernetes.io/projected/93d4decb-aa9d-40aa-8e02-c6557c64aacb-kube-api-access-55nf2\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563585 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-encryption-config\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563608 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7ae761c-0c0b-4949-964e-ac6a76720d41-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563658 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563687 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-policies\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563712 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d9039ee1-eaf1-4b12-9849-909d95692fda-samples-operator-tls\") 
pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563737 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-node-pullsecrets\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563760 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563785 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563807 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbhlw\" (UniqueName: \"kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563861 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563882 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563902 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: 
\"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563925 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/435345a0-9e68-4178-b036-b919092da385-serving-cert\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563947 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.563980 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-serving-cert\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-config\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564057 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13a87120-7360-414a-8647-2f6e962db2a2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564082 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564108 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-stats-auth\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564133 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lchnl\" (UniqueName: \"kubernetes.io/projected/d9039ee1-eaf1-4b12-9849-909d95692fda-kube-api-access-lchnl\") pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc 
kubenswrapper[4672]: I1007 14:51:02.564156 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-config\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zschd\" (UniqueName: \"kubernetes.io/projected/435345a0-9e68-4178-b036-b919092da385-kube-api-access-zschd\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564205 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4n7m\" (UniqueName: \"kubernetes.io/projected/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-kube-api-access-k4n7m\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564228 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564253 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564278 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef4ea638-65f8-4a4b-a587-0ac860e0478d-config\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564301 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564324 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-config\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564348 4672 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7100c20d-df6c-4382-856e-68e823252e84-serving-cert\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564371 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564396 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx6m5\" (UniqueName: \"kubernetes.io/projected/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-kube-api-access-kx6m5\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564422 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-images\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564447 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-client\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564468 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-image-import-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564494 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564517 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564544 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564568 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bqng\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-kube-api-access-7bqng\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564602 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-service-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564624 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564645 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564665 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-serving-cert\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564690 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13a87120-7360-414a-8647-2f6e962db2a2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564712 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564735 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/943fe211-1f41-4621-a164-678a5a38ec82-service-ca-bundle\") pod \"router-default-5444994796-n8l22\" (UID: 
\"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564759 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef4ea638-65f8-4a4b-a587-0ac860e0478d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564784 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzpm4\" (UniqueName: \"kubernetes.io/projected/e3d90d2e-7f97-400b-a29b-7bf63a75b43c-kube-api-access-kzpm4\") pod \"downloads-7954f5f757-rx4tl\" (UID: \"e3d90d2e-7f97-400b-a29b-7bf63a75b43c\") " pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564809 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564832 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564856 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd583d73-11c6-4955-b98f-e490f24a239e-serving-cert\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564879 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564903 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564930 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef4ea638-65f8-4a4b-a587-0ac860e0478d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564954 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.564976 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n2qn\" (UniqueName: \"kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv4tf\" (UniqueName: \"kubernetes.io/projected/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-kube-api-access-fv4tf\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565418 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bg2p\" (UniqueName: \"kubernetes.io/projected/684124fe-9cb9-4aa2-962d-9699e29f9ec5-kube-api-access-2bg2p\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565477 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565506 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hqz\" (UniqueName: \"kubernetes.io/projected/943fe211-1f41-4621-a164-678a5a38ec82-kube-api-access-n7hqz\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565542 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n8dg\" (UniqueName: \"kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565565 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit-dir\") pod \"apiserver-76f77b778f-bwrwj\" (UID: 
\"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565589 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565612 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565636 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565659 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-serving-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565695 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd583d73-11c6-4955-b98f-e490f24a239e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565722 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565745 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565768 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42m28\" (UniqueName: \"kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " 
pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565793 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4rb5\" (UniqueName: \"kubernetes.io/projected/13a87120-7360-414a-8647-2f6e962db2a2-kube-api-access-q4rb5\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565817 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-auth-proxy-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.565842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-trusted-ca\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567399 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-client\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567430 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567457 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfjl2\" (UniqueName: \"kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567479 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-metrics-tls\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567505 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-dir\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567528 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sqmwf\" (UniqueName: \"kubernetes.io/projected/2b444f1c-1587-4f49-9235-7313b6284a43-kube-api-access-sqmwf\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567553 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567576 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-default-certificate\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567600 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-metrics-certs\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567629 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz4rx\" (UniqueName: \"kubernetes.io/projected/cd583d73-11c6-4955-b98f-e490f24a239e-kube-api-access-vz4rx\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567654 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-trusted-ca\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567682 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2b444f1c-1587-4f49-9235-7313b6284a43-machine-approver-tls\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567707 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567731 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-encryption-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567754 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567783 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567810 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddcb5\" (UniqueName: \"kubernetes.io/projected/7100c20d-df6c-4382-856e-68e823252e84-kube-api-access-ddcb5\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567839 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567863 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.568249 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.569400 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.571064 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.571116 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w45mb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.571127 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fqpf"] 
Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.571266 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-images\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.572519 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.572692 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5547l"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.573533 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.573644 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.573768 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13a87120-7360-414a-8647-2f6e962db2a2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.574886 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-bwrwj"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.575472 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.576092 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-service-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.576186 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-trusted-ca\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.583061 4672 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.583140 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-auth-proxy-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.584903 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.586712 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.586887 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93d4decb-aa9d-40aa-8e02-c6557c64aacb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.587372 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd583d73-11c6-4955-b98f-e490f24a239e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.588962 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7100c20d-df6c-4382-856e-68e823252e84-config\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.590131 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.590683 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.590826 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-policies\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.591447 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.591635 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7ae761c-0c0b-4949-964e-ac6a76720d41-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.591957 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b444f1c-1587-4f49-9235-7313b6284a43-config\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.592389 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13a87120-7360-414a-8647-2f6e962db2a2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.593156 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef4ea638-65f8-4a4b-a587-0ac860e0478d-config\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.593953 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-encryption-config\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.594880 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-client\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.595135 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93d4decb-aa9d-40aa-8e02-c6557c64aacb-config\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" Oct 07 
14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.595887 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.567225 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7ae761c-0c0b-4949-964e-ac6a76720d41-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.596740 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-audit-dir\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.600382 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/435345a0-9e68-4178-b036-b919092da385-serving-cert\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.600579 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.601657 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.602083 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.602472 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.602620 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7100c20d-df6c-4382-856e-68e823252e84-serving-cert\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.603340 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435345a0-9e68-4178-b036-b919092da385-config\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.603721 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.603856 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.604504 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.605114 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d9039ee1-eaf1-4b12-9849-909d95692fda-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.605150 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.605611 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2b444f1c-1587-4f49-9235-7313b6284a43-machine-approver-tls\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.605861 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef4ea638-65f8-4a4b-a587-0ac860e0478d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.607605 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qpgsl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.608862 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") pod 
\"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.609341 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-serving-cert\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.610978 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.611271 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd583d73-11c6-4955-b98f-e490f24a239e-serving-cert\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.618427 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.621726 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.625418 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.627718 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.629141 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rx4tl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.630393 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.631848 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.633391 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.634518 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.636340 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.637499 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.638816 4672 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.640119 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.641464 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.642441 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.642763 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.643767 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.644929 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.646216 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.647135 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.648292 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.649654 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.651461 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.652213 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9krvm"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.653561 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.654978 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-87dhj"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.656763 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.656640 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-ns9p2"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.657984 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-62btl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.658112 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.658138 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fgbcz"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.659606 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.660902 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-ns9p2"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.661953 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qpgsl"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.662972 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.664373 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ljj62"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.665404 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ljj62"] Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.665534 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.668653 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.668920 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.669087 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.669233 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx6m5\" (UniqueName: \"kubernetes.io/projected/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-kube-api-access-kx6m5\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.669314 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-image-import-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " 
pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670444 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670139 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670374 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-image-import-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670541 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670652 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bqng\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-kube-api-access-7bqng\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670704 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670727 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-serving-cert\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670753 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/943fe211-1f41-4621-a164-678a5a38ec82-service-ca-bundle\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670841 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzpm4\" (UniqueName: 
\"kubernetes.io/projected/e3d90d2e-7f97-400b-a29b-7bf63a75b43c-kube-api-access-kzpm4\") pod \"downloads-7954f5f757-rx4tl\" (UID: \"e3d90d2e-7f97-400b-a29b-7bf63a75b43c\") " pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670872 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670894 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670920 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670947 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n2qn\" (UniqueName: \"kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.670984 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bg2p\" (UniqueName: \"kubernetes.io/projected/684124fe-9cb9-4aa2-962d-9699e29f9ec5-kube-api-access-2bg2p\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671008 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671049 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hqz\" (UniqueName: \"kubernetes.io/projected/943fe211-1f41-4621-a164-678a5a38ec82-kube-api-access-n7hqz\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671080 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit-dir\") pod \"apiserver-76f77b778f-bwrwj\" (UID: 
\"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671102 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671132 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671150 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-serving-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671177 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671176 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit-dir\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671228 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-client\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671255 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfjl2\" (UniqueName: \"kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671283 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-metrics-tls\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671317 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"audit\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-audit\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671329 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-default-certificate\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671348 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-metrics-certs\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671495 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-trusted-ca\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671547 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-encryption-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671618 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671697 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671773 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671808 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " 
pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671858 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-serving-ca\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671882 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671911 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-node-pullsecrets\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671928 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671956 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.671976 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.672007 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.672045 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-stats-auth\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.672174 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" 
(UniqueName: \"kubernetes.io/host-path/684124fe-9cb9-4aa2-962d-9699e29f9ec5-node-pullsecrets\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.672228 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.672859 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.673738 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-trusted-ca\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.674602 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-serving-cert\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.674736 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-etcd-client\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.675484 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/684124fe-9cb9-4aa2-962d-9699e29f9ec5-encryption-config\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.675638 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-metrics-tls\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.676114 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.676199 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: 
\"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.676943 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.683786 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.686374 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.693575 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.702428 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.713160 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.723070 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.733386 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.742577 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.754406 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc 
kubenswrapper[4672]: I1007 14:51:02.768107 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.775282 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.782610 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.794132 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.803225 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.812881 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.823492 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.833506 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.843522 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.851416 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.868653 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.873515 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.883529 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.903618 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.922695 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.943729 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.963951 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 07 14:51:02 crc kubenswrapper[4672]: I1007 14:51:02.983668 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.003243 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.023454 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.035086 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.050392 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.054518 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.064386 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.083963 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.104338 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.124583 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.136006 
4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-default-certificate\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.143600 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.156758 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-stats-auth\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.164067 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.175038 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/943fe211-1f41-4621-a164-678a5a38ec82-metrics-certs\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.184088 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.203058 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.212453 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/943fe211-1f41-4621-a164-678a5a38ec82-service-ca-bundle\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.244046 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.264065 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.283522 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.304220 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.324654 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.363419 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.382880 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 07 14:51:03 crc 
kubenswrapper[4672]: I1007 14:51:03.403417 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.423257 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.443717 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.463143 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.483286 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.503490 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.522689 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.543111 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.561334 4672 request.go:700] Waited for 1.014933351s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver-operator/secrets?fieldSelector=metadata.name%3Dkube-apiserver-operator-serving-cert&limit=500&resourceVersion=0 Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.562490 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.582507 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.603502 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.622468 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.642268 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.662596 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.682286 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.702705 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 07 14:51:03 crc 
kubenswrapper[4672]: I1007 14:51:03.722219 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.742983 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.763835 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.782744 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.803028 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.823046 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.842332 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.862772 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.882897 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.902814 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.922565 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.941978 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.963513 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 07 14:51:03 crc kubenswrapper[4672]: I1007 14:51:03.983008 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.003096 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.022646 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.042474 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.063277 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.083326 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 14:51:04 crc kubenswrapper[4672]: 
I1007 14:51:04.102762 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.122134 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.141989 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.162807 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.182966 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.217293 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbgvh\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-kube-api-access-dbgvh\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.236002 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef4ea638-65f8-4a4b-a587-0ac860e0478d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-n2wt6\" (UID: \"ef4ea638-65f8-4a4b-a587-0ac860e0478d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.256535 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv4tf\" (UniqueName: \"kubernetes.io/projected/d45d6f67-5e1c-4892-81c4-f6f227caa2d5-kube-api-access-fv4tf\") pod \"openshift-controller-manager-operator-756b6f6bc6-7cr2m\" (UID: \"d45d6f67-5e1c-4892-81c4-f6f227caa2d5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.276662 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n8dg\" (UniqueName: \"kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg\") pod \"controller-manager-879f6c89f-rsb29\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.296806 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42m28\" (UniqueName: \"kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28\") pod \"console-f9d7485db-5547l\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.315925 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4rb5\" (UniqueName: \"kubernetes.io/projected/13a87120-7360-414a-8647-2f6e962db2a2-kube-api-access-q4rb5\") pod \"openshift-apiserver-operator-796bbdcf4f-zk6wm\" (UID: \"13a87120-7360-414a-8647-2f6e962db2a2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" 
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.336986 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55nf2\" (UniqueName: \"kubernetes.io/projected/93d4decb-aa9d-40aa-8e02-c6557c64aacb-kube-api-access-55nf2\") pod \"machine-api-operator-5694c8668f-w45mb\" (UID: \"93d4decb-aa9d-40aa-8e02-c6557c64aacb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.356760 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbhlw\" (UniqueName: \"kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw\") pod \"route-controller-manager-6576b87f9c-dxhdb\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.378635 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4n7m\" (UniqueName: \"kubernetes.io/projected/764b1457-5fdb-4ba1-baf0-5aefbc2271ca-kube-api-access-k4n7m\") pod \"apiserver-7bbb656c7d-wqndv\" (UID: \"764b1457-5fdb-4ba1-baf0-5aefbc2271ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.397658 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7ae761c-0c0b-4949-964e-ac6a76720d41-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c5dx9\" (UID: \"d7ae761c-0c0b-4949-964e-ac6a76720d41\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.417130 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqmwf\" (UniqueName: \"kubernetes.io/projected/2b444f1c-1587-4f49-9235-7313b6284a43-kube-api-access-sqmwf\") pod \"machine-approver-56656f9798-9rhdm\" (UID: \"2b444f1c-1587-4f49-9235-7313b6284a43\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.429714 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.442810 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz4rx\" (UniqueName: \"kubernetes.io/projected/cd583d73-11c6-4955-b98f-e490f24a239e-kube-api-access-vz4rx\") pod \"openshift-config-operator-7777fb866f-trnl6\" (UID: \"cd583d73-11c6-4955-b98f-e490f24a239e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.455301 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.458492 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddcb5\" (UniqueName: \"kubernetes.io/projected/7100c20d-df6c-4382-856e-68e823252e84-kube-api-access-ddcb5\") pod \"console-operator-58897d9998-5fqpf\" (UID: \"7100c20d-df6c-4382-856e-68e823252e84\") " pod="openshift-console-operator/console-operator-58897d9998-5fqpf"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.464443 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.473651 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.476290 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zschd\" (UniqueName: \"kubernetes.io/projected/435345a0-9e68-4178-b036-b919092da385-kube-api-access-zschd\") pod \"authentication-operator-69f744f599-kqvf5\" (UID: \"435345a0-9e68-4178-b036-b919092da385\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.497098 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.497530 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lchnl\" (UniqueName: \"kubernetes.io/projected/d9039ee1-eaf1-4b12-9849-909d95692fda-kube-api-access-lchnl\") pod \"cluster-samples-operator-665b6dd947-bkmq6\" (UID: \"d9039ee1-eaf1-4b12-9849-909d95692fda\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.502554 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.508141 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.523196 4672 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.540993 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.541160 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5547l"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.542540 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.559925 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.562825 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.570439 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.581367 4672 request.go:700] Waited for 1.924273542s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.583116 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.606412 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.626276 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.638412 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5fqpf"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.644242 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.662854 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.665048 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.684315 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.704067 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.724578 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.742842 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.744416 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.746118 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.751886 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w45mb"]
Oct 07 14:51:04 crc kubenswrapper[4672]: W1007 14:51:04.755670 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b444f1c_1587_4f49_9235_7313b6284a43.slice/crio-108b46c52063133eeb6c3442cfdbf0ec4c16c5415a3ecc19ae9a18d61c32d437 WatchSource:0}: Error finding container 108b46c52063133eeb6c3442cfdbf0ec4c16c5415a3ecc19ae9a18d61c32d437: Status 404 returned error can't find the container with id 108b46c52063133eeb6c3442cfdbf0ec4c16c5415a3ecc19ae9a18d61c32d437
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.802380 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bqng\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-kube-api-access-7bqng\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.802887 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx6m5\" (UniqueName: \"kubernetes.io/projected/0fe61e6a-3e91-41df-b397-47f2d55b6b5f-kube-api-access-kx6m5\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5fs7\" (UID: \"0fe61e6a-3e91-41df-b397-47f2d55b6b5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7"
Oct 07 14:51:04 crc kubenswrapper[4672]: W1007 14:51:04.812664 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93d4decb_aa9d_40aa_8e02_c6557c64aacb.slice/crio-8dae3a694362edf45790ad0770354f7dbe718157741762d9536e83001b119ad1 WatchSource:0}: Error finding container 8dae3a694362edf45790ad0770354f7dbe718157741762d9536e83001b119ad1: Status 404 returned error can't find the container with id 8dae3a694362edf45790ad0770354f7dbe718157741762d9536e83001b119ad1
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.817950 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.823955 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzpm4\" (UniqueName: \"kubernetes.io/projected/e3d90d2e-7f97-400b-a29b-7bf63a75b43c-kube-api-access-kzpm4\") pod \"downloads-7954f5f757-rx4tl\" (UID: \"e3d90d2e-7f97-400b-a29b-7bf63a75b43c\") " pod="openshift-console/downloads-7954f5f757-rx4tl"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.860829 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bg2p\" (UniqueName: \"kubernetes.io/projected/684124fe-9cb9-4aa2-962d-9699e29f9ec5-kube-api-access-2bg2p\") pod \"apiserver-76f77b778f-bwrwj\" (UID: \"684124fe-9cb9-4aa2-962d-9699e29f9ec5\") " pod="openshift-apiserver/apiserver-76f77b778f-bwrwj"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.861117 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n2qn\" (UniqueName: \"kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn\") pod \"marketplace-operator-79b997595-5t2r9\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.873502 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.882750 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hqz\" (UniqueName: \"kubernetes.io/projected/943fe211-1f41-4621-a164-678a5a38ec82-kube-api-access-n7hqz\") pod \"router-default-5444994796-n8l22\" (UID: \"943fe211-1f41-4621-a164-678a5a38ec82\") " pod="openshift-ingress/router-default-5444994796-n8l22"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.903753 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfjl2\" (UniqueName: \"kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2\") pod \"oauth-openshift-558db77b4-hknhb\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " pod="openshift-authentication/oauth-openshift-558db77b4-hknhb"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.918701 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rx4tl"
Oct 07 14:51:04 crc kubenswrapper[4672]: I1007 14:51:04.945809 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a3f2d35f-7a5d-4d53-91b7-9ace62bc4305-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ssnbh\" (UID: \"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.003885 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m"]
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.003979 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"]
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005402 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005474 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005503 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpqjw\" (UniqueName: \"kubernetes.io/projected/5462cde6-0d5f-4802-bf9b-374b14693d1e-kube-api-access-zpqjw\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005549 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005595 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005700 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-profile-collector-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005813 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-srv-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005837 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6bd\" (UniqueName: \"kubernetes.io/projected/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-kube-api-access-nv6bd\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005874 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2qz7\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005911 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-proxy-tls\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.005953 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006002 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006061 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-profile-collector-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006100 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006127 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006148 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006171 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc9hw\" (UniqueName: \"kubernetes.io/projected/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-kube-api-access-xc9hw\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006197 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-srv-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006218 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tqzr\" (UniqueName: \"kubernetes.io/projected/60c3cf1b-18ef-48df-9be9-2e4219738896-kube-api-access-7tqzr\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.006466 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kqvf5"]
Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.007406 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.507386308 +0000 UTC m=+142.482565179 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.030292 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm"]
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.033865 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5547l"]
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.036055 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6"]
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107153 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107438 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107486 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ffe44a-194c-4e7f-8af1-505d832867c1-config\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107525 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-apiservice-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107542 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/87ed1886-2066-4275-a791-68c51b5f3b26-proxy-tls\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107557 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-registration-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107589 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-profile-collector-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.107625 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9"
Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.109115 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.609092817 +0000 UTC m=+142.584271398 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.109157 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66s6c\" (UniqueName: \"kubernetes.io/projected/45083805-b344-4b8d-8ab7-c90a7bcbebf2-kube-api-access-66s6c\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.109354 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-node-bootstrap-token\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.109393 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.109615 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115189 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhhr2\" (UniqueName: \"kubernetes.io/projected/d26357e0-a31a-460e-94f0-3414790054e6-kube-api-access-fhhr2\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115310 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-config\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115338 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d26357e0-a31a-460e-94f0-3414790054e6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115367 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-mountpoint-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115410 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-srv-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115437 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115490 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6bd\" (UniqueName: \"kubernetes.io/projected/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-kube-api-access-nv6bd\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115515 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k9l9\" (UniqueName: \"kubernetes.io/projected/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-kube-api-access-2k9l9\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115622 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-images\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115650 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-plugins-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115692 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2qz7\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115721 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p98ks\" (UniqueName: \"kubernetes.io/projected/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-kube-api-access-p98ks\") pod \"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115749 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkvvk\" (UniqueName: \"kubernetes.io/projected/87ed1886-2066-4275-a791-68c51b5f3b26-kube-api-access-rkvvk\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115804 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-proxy-tls\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115843 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-cert\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.115882 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/45083805-b344-4b8d-8ab7-c90a7bcbebf2-tmpfs\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117499 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117565 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68kps\" (UniqueName: \"kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117588 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-metrics-tls\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117633 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-certs\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117658 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-webhook-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117714 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfgd7\" (UniqueName: \"kubernetes.io/projected/21ffe44a-194c-4e7f-8af1-505d832867c1-kube-api-access-gfgd7\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117779 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.117973 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-config\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118112 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-profile-collector-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118144 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118205 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-socket-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118229 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118272 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118297 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118329 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118359 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc9hw\" (UniqueName: \"kubernetes.io/projected/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-kube-api-access-xc9hw\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118381 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-srv-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118407 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tqzr\" (UniqueName: \"kubernetes.io/projected/60c3cf1b-18ef-48df-9be9-2e4219738896-kube-api-access-7tqzr\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118430 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118469 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4t7r\" (UniqueName: \"kubernetes.io/projected/498e2dcc-7140-4963-bf03-062dad700275-kube-api-access-w4t7r\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118590 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/498e2dcc-7140-4963-bf03-062dad700275-metrics-tls\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118626 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118647 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-client\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118686 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-serving-cert\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118707 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118745 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzmfr\" (UniqueName: \"kubernetes.io/projected/ef24f96b-e093-473e-b51b-106c45d10ca1-kube-api-access-vzmfr\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118767 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zb6x\" (UniqueName: \"kubernetes.io/projected/5df61784-8d96-4183-b823-02154635d03b-kube-api-access-2zb6x\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118788 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqcsh\" (UniqueName: \"kubernetes.io/projected/c2ede9ea-5c2e-4deb-8729-159facdfae12-kube-api-access-rqcsh\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.118837 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wzjp\" (UniqueName: \"kubernetes.io/projected/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-kube-api-access-2wzjp\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119097 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-cabundle\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119155 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119178 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8wt2\" (UniqueName: \"kubernetes.io/projected/a767981e-d60a-4dbb-961b-d44896100170-kube-api-access-j8wt2\") pod \"migrator-59844c95c7-zrhnd\" (UID: \"a767981e-d60a-4dbb-961b-d44896100170\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd"
Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119202 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ffe44a-194c-4e7f-8af1-505d832867c1-serving-cert\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"
Oct 
07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119260 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119287 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-service-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119313 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-key\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119352 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/498e2dcc-7140-4963-bf03-062dad700275-config-volume\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119378 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.119404 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpqjw\" (UniqueName: \"kubernetes.io/projected/5462cde6-0d5f-4802-bf9b-374b14693d1e-kube-api-access-zpqjw\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.123072 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.124236 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.624210935 +0000 UTC m=+142.599389726 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.124626 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.124661 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.124765 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw5ff\" (UniqueName: \"kubernetes.io/projected/158b1bc4-faf6-4c18-bb85-f0c586a04a19-kube-api-access-bw5ff\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.124791 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-csi-data-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.125565 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.126621 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.126994 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.127744 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.129271 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-proxy-tls\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.133460 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-profile-collector-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.146154 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-profile-collector-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.147493 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.151543 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.157893 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.158093 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5462cde6-0d5f-4802-bf9b-374b14693d1e-srv-cert\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.158541 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60c3cf1b-18ef-48df-9be9-2e4219738896-srv-cert\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.165176 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.167826 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.182268 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.183164 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc9hw\" (UniqueName: \"kubernetes.io/projected/203a8e07-0bf9-4436-86eb-5e90cbe1a1d9-kube-api-access-xc9hw\") pod \"multus-admission-controller-857f4d67dd-j5nfj\" (UID: \"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.216867 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tqzr\" (UniqueName: \"kubernetes.io/projected/60c3cf1b-18ef-48df-9be9-2e4219738896-kube-api-access-7tqzr\") pod \"olm-operator-6b444d44fb-747wv\" (UID: \"60c3cf1b-18ef-48df-9be9-2e4219738896\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227500 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227806 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-config\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227840 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227883 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-socket-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227905 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227925 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227961 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4t7r\" (UniqueName: \"kubernetes.io/projected/498e2dcc-7140-4963-bf03-062dad700275-kube-api-access-w4t7r\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.227987 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/498e2dcc-7140-4963-bf03-062dad700275-metrics-tls\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228005 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228051 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-client\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228070 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-serving-cert\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228086 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228120 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzmfr\" (UniqueName: \"kubernetes.io/projected/ef24f96b-e093-473e-b51b-106c45d10ca1-kube-api-access-vzmfr\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228139 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zb6x\" (UniqueName: 
\"kubernetes.io/projected/5df61784-8d96-4183-b823-02154635d03b-kube-api-access-2zb6x\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228157 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqcsh\" (UniqueName: \"kubernetes.io/projected/c2ede9ea-5c2e-4deb-8729-159facdfae12-kube-api-access-rqcsh\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228177 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wzjp\" (UniqueName: \"kubernetes.io/projected/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-kube-api-access-2wzjp\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228211 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-cabundle\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228251 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8wt2\" (UniqueName: \"kubernetes.io/projected/a767981e-d60a-4dbb-961b-d44896100170-kube-api-access-j8wt2\") pod \"migrator-59844c95c7-zrhnd\" (UID: \"a767981e-d60a-4dbb-961b-d44896100170\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228288 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228308 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ffe44a-194c-4e7f-8af1-505d832867c1-serving-cert\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228357 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-service-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228376 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-key\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc 
kubenswrapper[4672]: I1007 14:51:05.228395 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/498e2dcc-7140-4963-bf03-062dad700275-config-volume\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228461 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw5ff\" (UniqueName: \"kubernetes.io/projected/158b1bc4-faf6-4c18-bb85-f0c586a04a19-kube-api-access-bw5ff\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228479 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-csi-data-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228516 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228540 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ffe44a-194c-4e7f-8af1-505d832867c1-config\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228558 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-apiservice-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228593 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/87ed1886-2066-4275-a791-68c51b5f3b26-proxy-tls\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228612 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-registration-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228633 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228678 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66s6c\" (UniqueName: \"kubernetes.io/projected/45083805-b344-4b8d-8ab7-c90a7bcbebf2-kube-api-access-66s6c\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228695 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-node-bootstrap-token\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228713 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228757 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhhr2\" (UniqueName: \"kubernetes.io/projected/d26357e0-a31a-460e-94f0-3414790054e6-kube-api-access-fhhr2\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228778 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-config\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228795 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d26357e0-a31a-460e-94f0-3414790054e6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228825 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-mountpoint-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228844 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: 
\"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228869 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k9l9\" (UniqueName: \"kubernetes.io/projected/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-kube-api-access-2k9l9\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228901 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-plugins-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228927 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-images\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228947 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p98ks\" (UniqueName: \"kubernetes.io/projected/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-kube-api-access-p98ks\") pod \"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.228978 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkvvk\" (UniqueName: \"kubernetes.io/projected/87ed1886-2066-4275-a791-68c51b5f3b26-kube-api-access-rkvvk\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229006 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-cert\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229039 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/45083805-b344-4b8d-8ab7-c90a7bcbebf2-tmpfs\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229070 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68kps\" (UniqueName: \"kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229088 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-metrics-tls\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229120 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-certs\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229136 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-webhook-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.229156 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfgd7\" (UniqueName: \"kubernetes.io/projected/21ffe44a-194c-4e7f-8af1-505d832867c1-kube-api-access-gfgd7\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.229538 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.729517988 +0000 UTC m=+142.704696559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.230809 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.231078 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-socket-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.235746 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.245087 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-registration-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.253556 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-config\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.257374 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-config\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.257548 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/498e2dcc-7140-4963-bf03-062dad700275-config-volume\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.257596 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/45083805-b344-4b8d-8ab7-c90a7bcbebf2-tmpfs\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.257747 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-csi-data-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.258250 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-plugins-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.258303 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c2ede9ea-5c2e-4deb-8729-159facdfae12-mountpoint-dir\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.258344 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ffe44a-194c-4e7f-8af1-505d832867c1-config\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.258754 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.258772 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/87ed1886-2066-4275-a791-68c51b5f3b26-images\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.261898 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.263356 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-cabundle\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.264529 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-service-ca\") pod \"etcd-operator-b45778765-9krvm\" (UID: 
\"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.270808 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.271592 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.273741 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpqjw\" (UniqueName: \"kubernetes.io/projected/5462cde6-0d5f-4802-bf9b-374b14693d1e-kube-api-access-zpqjw\") pod \"catalog-operator-68c6474976-6442q\" (UID: \"5462cde6-0d5f-4802-bf9b-374b14693d1e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.281145 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/158b1bc4-faf6-4c18-bb85-f0c586a04a19-signing-key\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.281145 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ffe44a-194c-4e7f-8af1-505d832867c1-serving-cert\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.281448 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.281664 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-apiservice-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.281837 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-serving-cert\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.282031 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45083805-b344-4b8d-8ab7-c90a7bcbebf2-webhook-cert\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.282237 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d26357e0-a31a-460e-94f0-3414790054e6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.282644 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-node-bootstrap-token\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.283104 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ef24f96b-e093-473e-b51b-106c45d10ca1-etcd-client\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.287478 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-metrics-tls\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.292634 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/87ed1886-2066-4275-a791-68c51b5f3b26-proxy-tls\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.296462 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-cert\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.298365 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/498e2dcc-7140-4963-bf03-062dad700275-metrics-tls\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.299213 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2qz7\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc 
kubenswrapper[4672]: I1007 14:51:05.304504 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.308228 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5df61784-8d96-4183-b823-02154635d03b-certs\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.324299 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6bd\" (UniqueName: \"kubernetes.io/projected/2bbe5ef4-60d9-4316-9a67-83c9eff14cbc-kube-api-access-nv6bd\") pod \"machine-config-controller-84d6567774-t4nwf\" (UID: \"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.327436 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfgd7\" (UniqueName: \"kubernetes.io/projected/21ffe44a-194c-4e7f-8af1-505d832867c1-kube-api-access-gfgd7\") pod \"service-ca-operator-777779d784-5rthz\" (UID: \"21ffe44a-194c-4e7f-8af1-505d832867c1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.327648 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53e127fb-7e94-4ae5-b143-4e6f3d19f48d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ft5w7\" (UID: \"53e127fb-7e94-4ae5-b143-4e6f3d19f48d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.329827 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.331181 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.831166116 +0000 UTC m=+142.806344697 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.342427 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wzjp\" (UniqueName: \"kubernetes.io/projected/d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026-kube-api-access-2wzjp\") pod \"ingress-canary-ns9p2\" (UID: \"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026\") " pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.346328 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.355149 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod943fe211_1f41_4621_a164_678a5a38ec82.slice/crio-83ee9b96a99f9623fdd3e1d32694ca299fc0e5d248d652f2a8c4a3694101a064 WatchSource:0}: Error finding container 83ee9b96a99f9623fdd3e1d32694ca299fc0e5d248d652f2a8c4a3694101a064: Status 404 returned error can't find the container with id 83ee9b96a99f9623fdd3e1d32694ca299fc0e5d248d652f2a8c4a3694101a064 Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.375475 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.378125 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.384639 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.392865 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4t7r\" (UniqueName: \"kubernetes.io/projected/498e2dcc-7140-4963-bf03-062dad700275-kube-api-access-w4t7r\") pod \"dns-default-ljj62\" (UID: \"498e2dcc-7140-4963-bf03-062dad700275\") " pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.395200 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-ns9p2" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.404503 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.408824 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhhr2\" (UniqueName: \"kubernetes.io/projected/d26357e0-a31a-460e-94f0-3414790054e6-kube-api-access-fhhr2\") pod \"control-plane-machine-set-operator-78cbb6b69f-dh89s\" (UID: \"d26357e0-a31a-460e-94f0-3414790054e6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.424179 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68kps\" (UniqueName: \"kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps\") pod \"collect-profiles-29330805-2bgnf\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.424617 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw5ff\" (UniqueName: \"kubernetes.io/projected/158b1bc4-faf6-4c18-bb85-f0c586a04a19-kube-api-access-bw5ff\") pod \"service-ca-9c57cc56f-fgbcz\" (UID: \"158b1bc4-faf6-4c18-bb85-f0c586a04a19\") " pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.431259 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.431666 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:05.931653062 +0000 UTC m=+142.906831643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.438477 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7ae761c_0c0b_4949_964e_ac6a76720d41.slice/crio-8da8a8cc57ffe46e4031f253685013eb8dd629b15894aab6a66869af99f52e84 WatchSource:0}: Error finding container 8da8a8cc57ffe46e4031f253685013eb8dd629b15894aab6a66869af99f52e84: Status 404 returned error can't find the container with id 8da8a8cc57ffe46e4031f253685013eb8dd629b15894aab6a66869af99f52e84 Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.438704 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.441245 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f715b04_a647_46de_8588_bff20df5820f.slice/crio-7f4170462f039fc3201b2673839d9726a019c97a145a0e5b6511ffaabe4f8e77 WatchSource:0}: Error finding container 7f4170462f039fc3201b2673839d9726a019c97a145a0e5b6511ffaabe4f8e77: Status 404 returned error can't find the container with id 7f4170462f039fc3201b2673839d9726a019c97a145a0e5b6511ffaabe4f8e77 Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.451553 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66s6c\" (UniqueName: \"kubernetes.io/projected/45083805-b344-4b8d-8ab7-c90a7bcbebf2-kube-api-access-66s6c\") pod \"packageserver-d55dfcdfc-xcwwl\" (UID: \"45083805-b344-4b8d-8ab7-c90a7bcbebf2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.456121 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.463436 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.478244 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adbf4a97-c315-4f3e-a38a-2a27d5fe83eb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hqcjc\" (UID: \"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.480558 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.500375 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p98ks\" (UniqueName: \"kubernetes.io/projected/ff7d6318-cde1-4fe5-b726-9b8b73e8548e-kube-api-access-p98ks\") pod \"package-server-manager-789f6589d5-qv6g9\" (UID: \"ff7d6318-cde1-4fe5-b726-9b8b73e8548e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.502817 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k9l9\" (UniqueName: \"kubernetes.io/projected/ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2-kube-api-access-2k9l9\") pod \"dns-operator-744455d44c-62btl\" (UID: \"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2\") " pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.503134 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.518186 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" event={"ID":"46244c53-c30f-4b0d-9e2f-873bc7c7c660","Type":"ContainerStarted","Data":"9caaec24ac1392d6dccc7e54dd1df1c67a4b58352669e71a16945062cdc912c4"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.519448 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" event={"ID":"2b444f1c-1587-4f49-9235-7313b6284a43","Type":"ContainerStarted","Data":"108b46c52063133eeb6c3442cfdbf0ec4c16c5415a3ecc19ae9a18d61c32d437"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.520968 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkvvk\" (UniqueName: \"kubernetes.io/projected/87ed1886-2066-4275-a791-68c51b5f3b26-kube-api-access-rkvvk\") pod \"machine-config-operator-74547568cd-vl5k6\" (UID: \"87ed1886-2066-4275-a791-68c51b5f3b26\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.524696 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" event={"ID":"d7ae761c-0c0b-4949-964e-ac6a76720d41","Type":"ContainerStarted","Data":"8da8a8cc57ffe46e4031f253685013eb8dd629b15894aab6a66869af99f52e84"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.528635 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.530274 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" event={"ID":"93d4decb-aa9d-40aa-8e02-c6557c64aacb","Type":"ContainerStarted","Data":"d521a29564aa24c3adeb03cd44b90565ac7b847843631c10934840abc8211e68"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.530326 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" event={"ID":"93d4decb-aa9d-40aa-8e02-c6557c64aacb","Type":"ContainerStarted","Data":"8dae3a694362edf45790ad0770354f7dbe718157741762d9536e83001b119ad1"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.532347 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.532659 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.032647982 +0000 UTC m=+143.007826563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.534700 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" event={"ID":"1f715b04-a647-46de-8588-bff20df5820f","Type":"ContainerStarted","Data":"7f4170462f039fc3201b2673839d9726a019c97a145a0e5b6511ffaabe4f8e77"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.539784 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzmfr\" (UniqueName: \"kubernetes.io/projected/ef24f96b-e093-473e-b51b-106c45d10ca1-kube-api-access-vzmfr\") pod \"etcd-operator-b45778765-9krvm\" (UID: \"ef24f96b-e093-473e-b51b-106c45d10ca1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.548571 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5547l" event={"ID":"84ee05d6-7689-446b-a8b6-4e186bbbec44","Type":"ContainerStarted","Data":"abaa98dcfed8289b943b265d5fb277825db2b11d26662b549198b2b29528f62c"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.556576 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.566263 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zb6x\" (UniqueName: \"kubernetes.io/projected/5df61784-8d96-4183-b823-02154635d03b-kube-api-access-2zb6x\") pod \"machine-config-server-87dhj\" (UID: \"5df61784-8d96-4183-b823-02154635d03b\") " pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.570921 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.578206 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" event={"ID":"ef4ea638-65f8-4a4b-a587-0ac860e0478d","Type":"ContainerStarted","Data":"c63c8955efcbd188a87ecd4de606f0f23fed27b2ec03387d649aa4da85d5ba36"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.580800 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.587419 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-n8l22" event={"ID":"943fe211-1f41-4621-a164-678a5a38ec82","Type":"ContainerStarted","Data":"83ee9b96a99f9623fdd3e1d32694ca299fc0e5d248d652f2a8c4a3694101a064"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.591299 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.602380 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.603273 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqcsh\" (UniqueName: \"kubernetes.io/projected/c2ede9ea-5c2e-4deb-8729-159facdfae12-kube-api-access-rqcsh\") pod \"csi-hostpathplugin-qpgsl\" (UID: \"c2ede9ea-5c2e-4deb-8729-159facdfae12\") " pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.630771 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" event={"ID":"13a87120-7360-414a-8647-2f6e962db2a2","Type":"ContainerStarted","Data":"c7ae8eff58e187c309cfc721adfc6dbd1d12e9465f141b2552957ae2b3cd978f"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.637528 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.637722 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.638423 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.638540 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.13850691 +0000 UTC m=+143.113685491 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.638635 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8wt2\" (UniqueName: \"kubernetes.io/projected/a767981e-d60a-4dbb-961b-d44896100170-kube-api-access-j8wt2\") pod \"migrator-59844c95c7-zrhnd\" (UID: \"a767981e-d60a-4dbb-961b-d44896100170\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.638758 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.639206 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.644859 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.653921 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fqpf"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.664352 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" event={"ID":"d45d6f67-5e1c-4892-81c4-f6f227caa2d5","Type":"ContainerStarted","Data":"deef201da525e71eae7cc41de0849410c8ff56b8e93ec1f9b1826ba1421fcd6f"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.670756 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trnl6"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.673892 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-bwrwj"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.678822 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.682574 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-87dhj" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.703534 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.729549 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" event={"ID":"435345a0-9e68-4178-b036-b919092da385","Type":"ContainerStarted","Data":"81310519c3d073201fa955b6feb18a1c786686a010c996e87050fad94376dccf"} Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.740989 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.741551 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.241537064 +0000 UTC m=+143.216715645 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.747254 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh"] Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.751678 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd583d73_11c6_4955_b98f_e490f24a239e.slice/crio-7047448cbc5957ff0e36aa085a12b14505633cf2047caacae23e16551927a202 WatchSource:0}: Error finding container 7047448cbc5957ff0e36aa085a12b14505633cf2047caacae23e16551927a202: Status 404 returned error can't find the container with id 7047448cbc5957ff0e36aa085a12b14505633cf2047caacae23e16551927a202 Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.759961 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rx4tl"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.764603 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-5rthz"] Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.771736 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7100c20d_df6c_4382_856e_68e823252e84.slice/crio-796c1516738dc0b7ba6f74f7ae722b6de0ba0d72d7f1d7c3a3e8a5f36e612c6d WatchSource:0}: Error finding container 796c1516738dc0b7ba6f74f7ae722b6de0ba0d72d7f1d7c3a3e8a5f36e612c6d: Status 404 returned error can't find the container with id 796c1516738dc0b7ba6f74f7ae722b6de0ba0d72d7f1d7c3a3e8a5f36e612c6d Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.782581 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.842324 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.842778 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.844876 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.344854884 +0000 UTC m=+143.320033525 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:05 crc kubenswrapper[4672]: W1007 14:51:05.871416 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5715dd2_f3be_4f8d_a4a3_c7d0ce56abc0.slice/crio-886fac729d64bedf37a106d9e0539feee1371aecf1d76777e48b56dc3063143c WatchSource:0}: Error finding container 886fac729d64bedf37a106d9e0539feee1371aecf1d76777e48b56dc3063143c: Status 404 returned error can't find the container with id 886fac729d64bedf37a106d9e0539feee1371aecf1d76777e48b56dc3063143c Oct 07 14:51:05 crc kubenswrapper[4672]: I1007 14:51:05.944771 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:05 crc kubenswrapper[4672]: E1007 14:51:05.945225 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.445211507 +0000 UTC m=+143.420390088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.047627 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.048042 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.547995684 +0000 UTC m=+143.523174265 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.149405 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.150445 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.650426382 +0000 UTC m=+143.625604963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.251891 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.252448 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.752423269 +0000 UTC m=+143.727601860 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.346317 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-ns9p2"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.356788 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.357381 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.857365352 +0000 UTC m=+143.832543943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.462894 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.463168 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.963139048 +0000 UTC m=+143.938317639 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.470457 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.477175 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:06.977155767 +0000 UTC m=+143.952334348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.483767 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.510245 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9krvm"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.538942 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.574185 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ljj62"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.579178 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.579559 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.079541914 +0000 UTC m=+144.054720495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.594979 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.601994 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.663869 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.690748 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.691085 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.191074802 +0000 UTC m=+144.166253383 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: W1007 14:51:06.702936 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef24f96b_e093_473e_b51b_106c45d10ca1.slice/crio-799290bc9ff25d3191e1f5d077dd4997cb232a508cebb2b0841b948051d9d3ae WatchSource:0}: Error finding container 799290bc9ff25d3191e1f5d077dd4997cb232a508cebb2b0841b948051d9d3ae: Status 404 returned error can't find the container with id 799290bc9ff25d3191e1f5d077dd4997cb232a508cebb2b0841b948051d9d3ae Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.713634 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.731457 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-j5nfj"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.781706 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" event={"ID":"53e127fb-7e94-4ae5-b143-4e6f3d19f48d","Type":"ContainerStarted","Data":"0d9e90b50ed6bfe56456b63a74aca736bfac9e9927463eefddb8b72a965ca7ce"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.793726 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.794150 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.294133766 +0000 UTC m=+144.269312347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.794417 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" event={"ID":"d9039ee1-eaf1-4b12-9849-909d95692fda","Type":"ContainerStarted","Data":"352a03110ad5aceb93f4f4f841560368e7c71c0846349adeb1f7c1e9305e2724"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.796179 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" event={"ID":"d7ae761c-0c0b-4949-964e-ac6a76720d41","Type":"ContainerStarted","Data":"5db1fa352fd2d34519580426e602e0ef565880c981d462f44cc5357f1d47c242"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.804892 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" event={"ID":"e626da5b-36d9-405a-b44e-fb6c355a51ba","Type":"ContainerStarted","Data":"775fc33a6fab06b48bf277f0de1525a3d1ff88e7f719c4eda1e230db1d44ccd1"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.808305 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" event={"ID":"7100c20d-df6c-4382-856e-68e823252e84","Type":"ContainerStarted","Data":"796c1516738dc0b7ba6f74f7ae722b6de0ba0d72d7f1d7c3a3e8a5f36e612c6d"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.812632 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" event={"ID":"764b1457-5fdb-4ba1-baf0-5aefbc2271ca","Type":"ContainerStarted","Data":"737c27dc4a7df760e8f8fcc523ef164705e08a316ef4575e9010dd942321f7aa"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.821587 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" event={"ID":"2b444f1c-1587-4f49-9235-7313b6284a43","Type":"ContainerStarted","Data":"731cb043a0c490f24baecefcb0a5a031183d14fd93273c253459f9c313959760"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.827960 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" event={"ID":"60c3cf1b-18ef-48df-9be9-2e4219738896","Type":"ContainerStarted","Data":"5793c651951de335d5ea2b4725fbcc53203352a83b9c13a8f17cbeaefd0e66de"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.835977 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" event={"ID":"21ffe44a-194c-4e7f-8af1-505d832867c1","Type":"ContainerStarted","Data":"a840c96135cee47df156398bcb6f89c9d8e2b0bad609ed758177ee91d4ff91ce"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.874452 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.879093 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-62btl"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.882812 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fgbcz"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.885071 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qpgsl"] Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.898292 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:06 crc kubenswrapper[4672]: E1007 14:51:06.901517 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.401500124 +0000 UTC m=+144.376678705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.937639 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" event={"ID":"435345a0-9e68-4178-b036-b919092da385","Type":"ContainerStarted","Data":"9f7b2bd73f755e89715defb638c5a93d6130ddc0a0ca0be06551f73d7529d555"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.940753 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-87dhj" event={"ID":"5df61784-8d96-4183-b823-02154635d03b","Type":"ContainerStarted","Data":"3f9323bd81e57a8f423cfb75b91553db1d3224111a0b1d09ca93a6b8c4454187"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.971537 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-ns9p2" event={"ID":"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026","Type":"ContainerStarted","Data":"4a1a7b6d25ba93babd1d35a938eb00e28de231917270f987dd222f188c29e20f"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.981160 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" event={"ID":"5462cde6-0d5f-4802-bf9b-374b14693d1e","Type":"ContainerStarted","Data":"1eadcd0024b185283ab23b0514e98f68498aeec7e06b5cb25cf511f2d8774f87"} Oct 07 14:51:06 crc kubenswrapper[4672]: W1007 14:51:06.988210 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadbf4a97_c315_4f3e_a38a_2a27d5fe83eb.slice/crio-310201c839db5536848e61c5c72db8156a1fd8e94009c185cbc14e2562495912 WatchSource:0}: Error finding container 310201c839db5536848e61c5c72db8156a1fd8e94009c185cbc14e2562495912: Status 404 returned error can't find the container with id 
310201c839db5536848e61c5c72db8156a1fd8e94009c185cbc14e2562495912 Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.991832 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" event={"ID":"ef24f96b-e093-473e-b51b-106c45d10ca1","Type":"ContainerStarted","Data":"799290bc9ff25d3191e1f5d077dd4997cb232a508cebb2b0841b948051d9d3ae"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.994914 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" event={"ID":"0fe61e6a-3e91-41df-b397-47f2d55b6b5f","Type":"ContainerStarted","Data":"efecf8048c9c2f559160f41f188c5e4ea22a8d64bf5056f1b954532fe884f1cb"} Oct 07 14:51:06 crc kubenswrapper[4672]: I1007 14:51:06.994967 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" event={"ID":"0fe61e6a-3e91-41df-b397-47f2d55b6b5f","Type":"ContainerStarted","Data":"2983391de6897b09923165d54b03322619e800ef16c202c835a86e5e9d9c7137"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.000341 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.006286 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-kqvf5" podStartSLOduration=123.006264803 podStartE2EDuration="2m3.006264803s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.006039717 +0000 UTC m=+143.981218308" watchObservedRunningTime="2025-10-07 14:51:07.006264803 +0000 UTC m=+143.981443394" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.006448 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.506414997 +0000 UTC m=+144.481593568 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.006920 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.008157 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.508142102 +0000 UTC m=+144.483320783 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.022002 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5547l" event={"ID":"84ee05d6-7689-446b-a8b6-4e186bbbec44","Type":"ContainerStarted","Data":"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.027753 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" event={"ID":"ef4ea638-65f8-4a4b-a587-0ac860e0478d","Type":"ContainerStarted","Data":"8df1c8e7ee7f461c60de11b77b11c7492aeb4bdf892318892ce8ec12b8e70892"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.035067 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"] Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.039771 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" event={"ID":"684124fe-9cb9-4aa2-962d-9699e29f9ec5","Type":"ContainerStarted","Data":"dbbc46cd233fd7d8b9bf27a1b993966a69dcc9060920af8f1248a790af4b2cda"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.073157 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" event={"ID":"93d4decb-aa9d-40aa-8e02-c6557c64aacb","Type":"ContainerStarted","Data":"504edb60c51ebcdc483827d4208281768422b49ece78368e1fb66dc9bfb3edbe"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.105725 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd"] Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.108009 4672 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" event={"ID":"cd583d73-11c6-4955-b98f-e490f24a239e","Type":"ContainerStarted","Data":"7047448cbc5957ff0e36aa085a12b14505633cf2047caacae23e16551927a202"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.109497 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.112277 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.612261755 +0000 UTC m=+144.587440336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.120892 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rx4tl" event={"ID":"e3d90d2e-7f97-400b-a29b-7bf63a75b43c","Type":"ContainerStarted","Data":"ce7351ecf93819cb979a1d111e00f533db833a5943db38acabd30f7b2fcbb158"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.121832 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.138400 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.138452 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.159339 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" event={"ID":"1f715b04-a647-46de-8588-bff20df5820f","Type":"ContainerStarted","Data":"452b3bbe9b67bc4733f105dea725f73ca3c05aadb302112f62b7d45eda976ec5"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.161271 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.166129 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-n8l22" 
event={"ID":"943fe211-1f41-4621-a164-678a5a38ec82","Type":"ContainerStarted","Data":"7ded8a0814cc7e5e7bb3e266015e0dfac018193a2d0b7aa865f7a40dfebece4c"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.168692 4672 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dxhdb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.168748 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" podUID="1f715b04-a647-46de-8588-bff20df5820f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.172423 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" event={"ID":"13a87120-7360-414a-8647-2f6e962db2a2","Type":"ContainerStarted","Data":"c5f14015654c228925eb83e1192081ec81bec78e96e90e636b72c8788575cb11"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.175477 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ljj62" event={"ID":"498e2dcc-7140-4963-bf03-062dad700275","Type":"ContainerStarted","Data":"417414d632c73f4c80ce7e53bfd00f81a7eabcc660c1be4bbc4bb5cc08562f66"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.176506 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9"] Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.182559 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" event={"ID":"d45d6f67-5e1c-4892-81c4-f6f227caa2d5","Type":"ContainerStarted","Data":"af91ef0d27c79c27b86df9657e0f1c787d67157adc442b8275c16a7fea261e1a"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.189160 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.190606 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl"] Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.197698 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:07 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:07 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:07 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.197762 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.198415 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5fs7" podStartSLOduration=122.198389093 podStartE2EDuration="2m2.198389093s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.197485599 +0000 UTC m=+144.172664200" watchObservedRunningTime="2025-10-07 14:51:07.198389093 +0000 UTC m=+144.173567674" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.203372 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" event={"ID":"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0","Type":"ContainerStarted","Data":"886fac729d64bedf37a106d9e0539feee1371aecf1d76777e48b56dc3063143c"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.203782 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.211725 4672 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5t2r9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.212226 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.212279 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.214453 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.714436226 +0000 UTC m=+144.689614887 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.214449 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" event={"ID":"46244c53-c30f-4b0d-9e2f-873bc7c7c660","Type":"ContainerStarted","Data":"0a9323c40879beb5fd465833a7653849c7e4e986dfac7ef28294d0d83e93ea84"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.214699 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.216728 4672 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rsb29 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.216802 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.223730 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" event={"ID":"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305","Type":"ContainerStarted","Data":"d5f5d968a82fae1b5b60fcbddc956af3fd4c1030b3dcdf9e95130ab4f6c7aed3"} Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.241932 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-rx4tl" podStartSLOduration=123.241910769 podStartE2EDuration="2m3.241910769s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.23736475 +0000 UTC m=+144.212543351" watchObservedRunningTime="2025-10-07 14:51:07.241910769 +0000 UTC m=+144.217089350" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.284731 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-5547l" podStartSLOduration=123.284713617 podStartE2EDuration="2m3.284713617s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.280515486 +0000 UTC m=+144.255694087" watchObservedRunningTime="2025-10-07 14:51:07.284713617 +0000 UTC m=+144.259892198" Oct 07 14:51:07 crc kubenswrapper[4672]: W1007 14:51:07.297439 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45083805_b344_4b8d_8ab7_c90a7bcbebf2.slice/crio-8a53210ee15e184013b20f4d5fe4a91b23191d9e8473f522dd67d296c249e951 
WatchSource:0}: Error finding container 8a53210ee15e184013b20f4d5fe4a91b23191d9e8473f522dd67d296c249e951: Status 404 returned error can't find the container with id 8a53210ee15e184013b20f4d5fe4a91b23191d9e8473f522dd67d296c249e951 Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.314325 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.314501 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.814473511 +0000 UTC m=+144.789652092 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.314891 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.316477 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.816436522 +0000 UTC m=+144.791615093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.323861 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" podStartSLOduration=122.323842217 podStartE2EDuration="2m2.323842217s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.318395504 +0000 UTC m=+144.293574105" watchObservedRunningTime="2025-10-07 14:51:07.323842217 +0000 UTC m=+144.299020798" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.359741 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-w45mb" podStartSLOduration=122.359724272 podStartE2EDuration="2m2.359724272s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.358131451 +0000 UTC m=+144.333310052" watchObservedRunningTime="2025-10-07 14:51:07.359724272 +0000 UTC m=+144.334902863" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.415965 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.416636 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:07.916610811 +0000 UTC m=+144.891789402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.484461 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c5dx9" podStartSLOduration=123.484444487 podStartE2EDuration="2m3.484444487s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.450482523 +0000 UTC m=+144.425661104" watchObservedRunningTime="2025-10-07 14:51:07.484444487 +0000 UTC m=+144.459623068" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.519221 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-n2wt6" podStartSLOduration=123.519200683 podStartE2EDuration="2m3.519200683s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.482993829 +0000 UTC m=+144.458172410" watchObservedRunningTime="2025-10-07 14:51:07.519200683 +0000 UTC m=+144.494379264" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.519786 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.520212 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.020200049 +0000 UTC m=+144.995378630 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.520640 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" podStartSLOduration=122.52062941 podStartE2EDuration="2m2.52062941s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.518820913 +0000 UTC m=+144.493999494" watchObservedRunningTime="2025-10-07 14:51:07.52062941 +0000 UTC m=+144.495807991" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.560505 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7cr2m" podStartSLOduration=123.56048525 podStartE2EDuration="2m3.56048525s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.558241921 +0000 UTC m=+144.533420532" watchObservedRunningTime="2025-10-07 14:51:07.56048525 +0000 UTC m=+144.535663831" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.597108 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zk6wm" podStartSLOduration=123.597086494 podStartE2EDuration="2m3.597086494s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.596719444 +0000 UTC m=+144.571898035" watchObservedRunningTime="2025-10-07 14:51:07.597086494 +0000 UTC m=+144.572265075" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.621034 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.621259 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.12122578 +0000 UTC m=+145.096404371 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.621875 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.622223 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.122210106 +0000 UTC m=+145.097388687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.654516 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-n8l22" podStartSLOduration=123.654478646 podStartE2EDuration="2m3.654478646s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.6523493 +0000 UTC m=+144.627527881" watchObservedRunningTime="2025-10-07 14:51:07.654478646 +0000 UTC m=+144.629657217" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.683360 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" podStartSLOduration=123.683342396 podStartE2EDuration="2m3.683342396s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:07.682899074 +0000 UTC m=+144.658077655" watchObservedRunningTime="2025-10-07 14:51:07.683342396 +0000 UTC m=+144.658520977" Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.722893 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.723097 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.223064952 +0000 UTC m=+145.198243543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.723466 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.723858 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.223835472 +0000 UTC m=+145.199014053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.823880 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.824144 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.324125374 +0000 UTC m=+145.299303955 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:07 crc kubenswrapper[4672]: I1007 14:51:07.925588 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:07 crc kubenswrapper[4672]: E1007 14:51:07.926169 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.426156231 +0000 UTC m=+145.401334812 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.027581 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.027934 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.527755287 +0000 UTC m=+145.502933888 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.027965 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.028424 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.528413754 +0000 UTC m=+145.503592335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.129256 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.129486 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.629459446 +0000 UTC m=+145.604638027 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.129763 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.130234 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.630226256 +0000 UTC m=+145.605404837 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.188289 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:08 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:08 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:08 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.188356 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.230357 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.230661 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.73063205 +0000 UTC m=+145.705810641 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.230757 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.231162 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.731151424 +0000 UTC m=+145.706330075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.237507 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" event={"ID":"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb","Type":"ContainerStarted","Data":"310201c839db5536848e61c5c72db8156a1fd8e94009c185cbc14e2562495912"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.240330 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" event={"ID":"ef24f96b-e093-473e-b51b-106c45d10ca1","Type":"ContainerStarted","Data":"4e734477489eedeb580af3864d63e51398aaabb2ae0ed69b8870fb7603671d5d"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.241930 4672 generic.go:334] "Generic (PLEG): container finished" podID="764b1457-5fdb-4ba1-baf0-5aefbc2271ca" containerID="1cabfc3f6446cece19121f5c24675a92a38ad68c9b6d0989c2b802a433a6dea9" exitCode=0 Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.242188 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" event={"ID":"764b1457-5fdb-4ba1-baf0-5aefbc2271ca","Type":"ContainerDied","Data":"1cabfc3f6446cece19121f5c24675a92a38ad68c9b6d0989c2b802a433a6dea9"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.248808 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" event={"ID":"7100c20d-df6c-4382-856e-68e823252e84","Type":"ContainerStarted","Data":"321cbb5de83f0842b754a571ff702151d76681b2da47b425dbf4c3694ee4f373"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.249387 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:08 crc 
kubenswrapper[4672]: I1007 14:51:08.253494 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-ns9p2" event={"ID":"d4ba90af-9de8-4c2f-8bdc-a9b6b4df6026","Type":"ContainerStarted","Data":"4f4c30a2f5797013dd88d2d7ff07f1504058d0b31a1c026ab979bbb81fe00c0e"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.254084 4672 patch_prober.go:28] interesting pod/console-operator-58897d9998-5fqpf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.254146 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" podUID="7100c20d-df6c-4382-856e-68e823252e84" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.265150 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9krvm" podStartSLOduration=124.265132799 podStartE2EDuration="2m4.265132799s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.263400713 +0000 UTC m=+145.238579304" watchObservedRunningTime="2025-10-07 14:51:08.265132799 +0000 UTC m=+145.240311380" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.266556 4672 generic.go:334] "Generic (PLEG): container finished" podID="684124fe-9cb9-4aa2-962d-9699e29f9ec5" containerID="d4a3adf16edbd35e61247046ffadfbdfd5b51436145a9aa1b62b1214e076869f" exitCode=0 Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.266632 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" event={"ID":"684124fe-9cb9-4aa2-962d-9699e29f9ec5","Type":"ContainerDied","Data":"d4a3adf16edbd35e61247046ffadfbdfd5b51436145a9aa1b62b1214e076869f"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.283320 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" podStartSLOduration=124.283300867 podStartE2EDuration="2m4.283300867s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.280524514 +0000 UTC m=+145.255703095" watchObservedRunningTime="2025-10-07 14:51:08.283300867 +0000 UTC m=+145.258479448" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.304320 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" event={"ID":"c3d69d29-e875-49dd-8929-40ac7f00470d","Type":"ContainerStarted","Data":"35e43ae13f3745390f55b45967d09eb927f2314ee49c8c00c7b94044235b5f91"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.317436 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ljj62" event={"ID":"498e2dcc-7140-4963-bf03-062dad700275","Type":"ContainerStarted","Data":"2ca38abcfa788f996a5958bd82dc20e8d451229c52d6cf128a1c54ae6e675880"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.326650 4672 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" event={"ID":"d9039ee1-eaf1-4b12-9849-909d95692fda","Type":"ContainerStarted","Data":"2fc28cf626d7bc84df39cfe53221db7ddf8d2ff6994a585a5849ee7173e2f447"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.327910 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" event={"ID":"60c3cf1b-18ef-48df-9be9-2e4219738896","Type":"ContainerStarted","Data":"64fb131b76f00c1a56499e062ab7f6f1431de283c48a31b758c1bc5ad37a7f41"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.328325 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.330529 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" event={"ID":"158b1bc4-faf6-4c18-bb85-f0c586a04a19","Type":"ContainerStarted","Data":"98d565624ba1a6091cd34d404f5be80926e1a3c97fc25bd0d70f5c53076abf20"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.331721 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.331853 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.831828246 +0000 UTC m=+145.807006837 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.331875 4672 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-747wv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.331915 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" podUID="60c3cf1b-18ef-48df-9be9-2e4219738896" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.332210 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.332657 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.832647757 +0000 UTC m=+145.807826338 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.341200 4672 generic.go:334] "Generic (PLEG): container finished" podID="cd583d73-11c6-4955-b98f-e490f24a239e" containerID="8ca5ff733aae2783c1804e0ebb9727afaffc907567b312af3f1d328c82bc69bc" exitCode=0 Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.341318 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" event={"ID":"cd583d73-11c6-4955-b98f-e490f24a239e","Type":"ContainerDied","Data":"8ca5ff733aae2783c1804e0ebb9727afaffc907567b312af3f1d328c82bc69bc"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.348207 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-ns9p2" podStartSLOduration=6.348191647 podStartE2EDuration="6.348191647s" podCreationTimestamp="2025-10-07 14:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.344117619 +0000 UTC m=+145.319296200" watchObservedRunningTime="2025-10-07 14:51:08.348191647 +0000 UTC m=+145.323370228" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.353410 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" event={"ID":"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2","Type":"ContainerStarted","Data":"49af582938e953c55fd51d8d91dd28f7452aaaa9edb9441a7939af6dc04eff37"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.367829 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" event={"ID":"ff7d6318-cde1-4fe5-b726-9b8b73e8548e","Type":"ContainerStarted","Data":"1684ecc2170ee778a84ca0e8111508ef00ebd5f1d7be34961a58f91d60c6f4b0"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.374916 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" event={"ID":"87ed1886-2066-4275-a791-68c51b5f3b26","Type":"ContainerStarted","Data":"a646cb8287fb8b2028b27e257ced91d9460a275912123cf9b988673733aad5d1"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.374974 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" event={"ID":"87ed1886-2066-4275-a791-68c51b5f3b26","Type":"ContainerStarted","Data":"9dd45cf76d26d18c254699acb1af5a0a36d47f3c91acb87c03467f4174398773"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.397924 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-87dhj" event={"ID":"5df61784-8d96-4183-b823-02154635d03b","Type":"ContainerStarted","Data":"0f3b15e9993b86efb58af8d586519017a5dde3a1908f1e21f741431627c06da0"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.399032 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" 
event={"ID":"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9","Type":"ContainerStarted","Data":"9fa2110bba4c11c45ad980ba9819d6ad4a6ea9764d7265fbd5dc88c842819e26"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.403661 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" event={"ID":"d26357e0-a31a-460e-94f0-3414790054e6","Type":"ContainerStarted","Data":"d87f11992a82b9f1c401e435192d85a14c14b64796d1681793a0340a16dca12b"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.403712 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" event={"ID":"d26357e0-a31a-460e-94f0-3414790054e6","Type":"ContainerStarted","Data":"7d47771fdb2b257ab2b65964cc55a4cec8a0c3baaaf2b44b78e6cd1d2a0c4548"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.413657 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" event={"ID":"5462cde6-0d5f-4802-bf9b-374b14693d1e","Type":"ContainerStarted","Data":"fc34d005d27222ba9969ab9e7fa88f4dd5d54292a919c39a315ef0d75feccbe7"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.414829 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.417822 4672 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-6442q container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.417874 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" podUID="5462cde6-0d5f-4802-bf9b-374b14693d1e" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.419541 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" event={"ID":"e626da5b-36d9-405a-b44e-fb6c355a51ba","Type":"ContainerStarted","Data":"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.420342 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.422581 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" podStartSLOduration=123.422560055 podStartE2EDuration="2m3.422560055s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.405570648 +0000 UTC m=+145.380749229" watchObservedRunningTime="2025-10-07 14:51:08.422560055 +0000 UTC m=+145.397738636" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.422792 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" 
event={"ID":"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc","Type":"ContainerStarted","Data":"6c790c2c0dacdd397c961088f846a9661710e9ce63ddc90bb8f413069063b535"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.422815 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" event={"ID":"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc","Type":"ContainerStarted","Data":"7856ecd6581225175c7b88585203a6ec7aacd52dcd61410b46205ed3766f7e69"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.428526 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" event={"ID":"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305","Type":"ContainerStarted","Data":"87bf1cfc95433f2fcb2070effaa4827a9f6971f128d92ae19ba3c040addc504b"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.429467 4672 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hknhb container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.429539 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.434823 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.434922 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:08.93490279 +0000 UTC m=+145.910081371 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.435815 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.443556 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 14:51:08.943538348 +0000 UTC m=+145.918717029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.454489 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" event={"ID":"2b444f1c-1587-4f49-9235-7313b6284a43","Type":"ContainerStarted","Data":"217038395fd275f0c55c2ee8cd70b8881d8eb560166017597e6c738e55714a85"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.483298 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" event={"ID":"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0","Type":"ContainerStarted","Data":"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.484094 4672 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5t2r9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.484146 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.505293 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" event={"ID":"45083805-b344-4b8d-8ab7-c90a7bcbebf2","Type":"ContainerStarted","Data":"8a53210ee15e184013b20f4d5fe4a91b23191d9e8473f522dd67d296c249e951"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.519861 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" event={"ID":"c2ede9ea-5c2e-4deb-8729-159facdfae12","Type":"ContainerStarted","Data":"8f91122a438d6b210f48ff8869e70e6954830cdbf0a64d3bad8085f6e6b28248"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.521656 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" event={"ID":"21ffe44a-194c-4e7f-8af1-505d832867c1","Type":"ContainerStarted","Data":"516c7a0849477df0e2a273cca57473e0a2fe3c5d89c6b3cd531328187480120c"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.523062 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" event={"ID":"a767981e-d60a-4dbb-961b-d44896100170","Type":"ContainerStarted","Data":"b2c3a770abc20632a168e75ee0c36c65c68b6ef7aa0916a368e2d3480b197bee"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.525765 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rx4tl" 
event={"ID":"e3d90d2e-7f97-400b-a29b-7bf63a75b43c","Type":"ContainerStarted","Data":"ddcc403fb978c39b1054eb7493e2149ea79d7520a163354124eb561db27becd6"} Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.526097 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.526131 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.528205 4672 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rsb29 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.528240 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.537600 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.537975 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.037943994 +0000 UTC m=+146.013122595 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.538678 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.540959 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 14:51:09.040944113 +0000 UTC m=+146.016122784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.545318 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.548996 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" podStartSLOduration=124.548981405 podStartE2EDuration="2m4.548981405s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.520218377 +0000 UTC m=+145.495396958" watchObservedRunningTime="2025-10-07 14:51:08.548981405 +0000 UTC m=+145.524159986" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.554040 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9rhdm" podStartSLOduration=126.554004717 podStartE2EDuration="2m6.554004717s" podCreationTimestamp="2025-10-07 14:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.551605794 +0000 UTC m=+145.526784385" watchObservedRunningTime="2025-10-07 14:51:08.554004717 +0000 UTC m=+145.529183298" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.573765 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dh89s" podStartSLOduration=123.573744327 podStartE2EDuration="2m3.573744327s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.571282172 +0000 UTC m=+145.546460753" watchObservedRunningTime="2025-10-07 14:51:08.573744327 +0000 UTC m=+145.548922908" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.620234 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-87dhj" podStartSLOduration=6.620217701 podStartE2EDuration="6.620217701s" podCreationTimestamp="2025-10-07 14:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.619195894 +0000 UTC m=+145.594374475" watchObservedRunningTime="2025-10-07 14:51:08.620217701 +0000 UTC m=+145.595396282" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.621202 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" podStartSLOduration=123.621193107 podStartE2EDuration="2m3.621193107s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.592370478 +0000 UTC m=+145.567549069" watchObservedRunningTime="2025-10-07 14:51:08.621193107 +0000 UTC m=+145.596371708" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.643732 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.644216 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.144201893 +0000 UTC m=+146.119380474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.696226 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-5rthz" podStartSLOduration=123.696208592 podStartE2EDuration="2m3.696208592s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:08.68587301 +0000 UTC m=+145.661051601" watchObservedRunningTime="2025-10-07 14:51:08.696208592 +0000 UTC m=+145.671387183" Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.746594 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.747060 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.247042911 +0000 UTC m=+146.222221492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.858611 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.859056 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.359038041 +0000 UTC m=+146.334216622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:08 crc kubenswrapper[4672]: I1007 14:51:08.963036 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:08 crc kubenswrapper[4672]: E1007 14:51:08.963723 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.463709498 +0000 UTC m=+146.438888079 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.065035 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.065167 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.565131619 +0000 UTC m=+146.540310200 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.065362 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.082466 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.582411984 +0000 UTC m=+146.557590565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.180735 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.180985 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.68095848 +0000 UTC m=+146.656137061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.181247 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.181692 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.681677649 +0000 UTC m=+146.656856230 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.186073 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:09 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:09 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:09 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.186126 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.282894 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.283228 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.783209683 +0000 UTC m=+146.758388284 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.384229 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.384736 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.884701685 +0000 UTC m=+146.859880326 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.485408 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.485581 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.985551711 +0000 UTC m=+146.960730302 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.485630 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.486223 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:09.986210658 +0000 UTC m=+146.961389239 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.542722 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" event={"ID":"a3f2d35f-7a5d-4d53-91b7-9ace62bc4305","Type":"ContainerStarted","Data":"e3c4794fc88e89a72015f08579bae1e6d5151618a0c0507c11451f93ae43d154"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.544545 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" event={"ID":"c3d69d29-e875-49dd-8929-40ac7f00470d","Type":"ContainerStarted","Data":"e41e74cb737b47e5643c30976f9543b4bea362733b6e8e52089a9dd49308b4ba"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.553207 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" event={"ID":"a767981e-d60a-4dbb-961b-d44896100170","Type":"ContainerStarted","Data":"b451a7e14c5a668e5eadde1a889565d4b82d3d73153fb37f15c96cd6d0ffa920"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.553269 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" event={"ID":"a767981e-d60a-4dbb-961b-d44896100170","Type":"ContainerStarted","Data":"72d3b95c2d8ab8ba4fa0c73b24ed8f464df024f7be11b97b44fa241481e39a4e"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.567564 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" event={"ID":"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9","Type":"ContainerStarted","Data":"6c7c24fac5b74710fd10db2ee3628e34d2228a7ece8a36717d3bff81eb47051f"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.567615 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" event={"ID":"203a8e07-0bf9-4436-86eb-5e90cbe1a1d9","Type":"ContainerStarted","Data":"5b8f71013202017290f9dcfa8e82da7a07f4a063d2a107fbc17eecacbbec368f"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.578342 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" event={"ID":"158b1bc4-faf6-4c18-bb85-f0c586a04a19","Type":"ContainerStarted","Data":"16a42efacda92baa5d8f209ac403d53496eb33307053810e8535451b432972a6"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.583105 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" event={"ID":"45083805-b344-4b8d-8ab7-c90a7bcbebf2","Type":"ContainerStarted","Data":"b67ed46baf81735190dbdd6d27aea8e64272b936f7c0dba7b6306d9bdcfe1f99"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.584074 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.585062 4672 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xcwwl container/packageserver 
namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body= Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.585097 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" podUID="45083805-b344-4b8d-8ab7-c90a7bcbebf2" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.585923 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" event={"ID":"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2","Type":"ContainerStarted","Data":"659917cb9a1b909b05da52e37bfc4f6b907720605eee36cd165637ef87546b96"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.586387 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.586470 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.086448369 +0000 UTC m=+147.061626950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.586711 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.587737 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.087726712 +0000 UTC m=+147.062905293 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.594880 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ssnbh" podStartSLOduration=125.59486529 podStartE2EDuration="2m5.59486529s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.590127955 +0000 UTC m=+146.565306536" watchObservedRunningTime="2025-10-07 14:51:09.59486529 +0000 UTC m=+146.570043871" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.599966 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" event={"ID":"87ed1886-2066-4275-a791-68c51b5f3b26","Type":"ContainerStarted","Data":"4567d7a343376728c64db2b69b96910c3c4986ccebbd02ef95f9ec2f5a95846b"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.604957 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" event={"ID":"53e127fb-7e94-4ae5-b143-4e6f3d19f48d","Type":"ContainerStarted","Data":"400e967b1614a97a5862dfa81d39005504f3e777782dbaf576e0beb7a9b80f56"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.614572 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" event={"ID":"ff7d6318-cde1-4fe5-b726-9b8b73e8548e","Type":"ContainerStarted","Data":"6bda482acd18503efc3b840f309d464b7254fd8efd49d34ab73012b590ff9669"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.614618 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" event={"ID":"ff7d6318-cde1-4fe5-b726-9b8b73e8548e","Type":"ContainerStarted","Data":"cbee8882a67893bf5ff36ba731773902c2a91b8f847ce76f8fc3b5b826c26b3c"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.614871 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.635538 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" event={"ID":"2bbe5ef4-60d9-4316-9a67-83c9eff14cbc","Type":"ContainerStarted","Data":"671fe92b1f1dd187aa66fa93d50da8160b8bd99265557529295fde322461c163"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.635601 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" podStartSLOduration=124.635578322 podStartE2EDuration="2m4.635578322s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.633464737 +0000 UTC m=+146.608643318" 
watchObservedRunningTime="2025-10-07 14:51:09.635578322 +0000 UTC m=+146.610756903" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.670250 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" event={"ID":"d9039ee1-eaf1-4b12-9849-909d95692fda","Type":"ContainerStarted","Data":"8f893e94bc89060a68e4373342097672cb7cf2a3b59507ff5389caad8979e046"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.673442 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrhnd" podStartSLOduration=124.673424239 podStartE2EDuration="2m4.673424239s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.671092388 +0000 UTC m=+146.646270969" watchObservedRunningTime="2025-10-07 14:51:09.673424239 +0000 UTC m=+146.648602820" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.687658 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" event={"ID":"adbf4a97-c315-4f3e-a38a-2a27d5fe83eb","Type":"ContainerStarted","Data":"4616efb46f7e21eab752fa54658d98d5bcc8dec9a52b8a71d1b6a5d744eced6f"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.687798 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.688287 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.18826886 +0000 UTC m=+147.163447441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.689678 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.691060 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.191035703 +0000 UTC m=+147.166214284 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.699161 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" podStartSLOduration=125.699141217 podStartE2EDuration="2m5.699141217s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.696161308 +0000 UTC m=+146.671339889" watchObservedRunningTime="2025-10-07 14:51:09.699141217 +0000 UTC m=+146.674319798" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.710397 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" event={"ID":"cd583d73-11c6-4955-b98f-e490f24a239e","Type":"ContainerStarted","Data":"b8d29df63127b5bceaa49be420d9196c36aa14a23ba17848b3e4e2d58cb9992c"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.711412 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.731936 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" event={"ID":"764b1457-5fdb-4ba1-baf0-5aefbc2271ca","Type":"ContainerStarted","Data":"91758ea78393ad505133d25bfa351ead55c65a2d2a4c6e78e4af0a4778b2cf19"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.763716 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" event={"ID":"684124fe-9cb9-4aa2-962d-9699e29f9ec5","Type":"ContainerStarted","Data":"36a66db7f5b7aab82e325ec84183b896277c7df581152dd49e2c678c17094523"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.775244 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ljj62" event={"ID":"498e2dcc-7140-4963-bf03-062dad700275","Type":"ContainerStarted","Data":"249f15b280c6bd4e82ad3b5e72f7d84999873bdc642dc67745669527b16ca205"} Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.775865 4672 patch_prober.go:28] interesting pod/console-operator-58897d9998-5fqpf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.775898 4672 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5t2r9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.775910 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" podUID="7100c20d-df6c-4382-856e-68e823252e84" containerName="console-operator" 
probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.775942 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.779322 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.779364 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.779983 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-j5nfj" podStartSLOduration=124.779965835 podStartE2EDuration="2m4.779965835s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.779620956 +0000 UTC m=+146.754799537" watchObservedRunningTime="2025-10-07 14:51:09.779965835 +0000 UTC m=+146.755144416" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.780427 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6442q" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.780446 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fgbcz" podStartSLOduration=124.780442098 podStartE2EDuration="2m4.780442098s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.735206016 +0000 UTC m=+146.710384597" watchObservedRunningTime="2025-10-07 14:51:09.780442098 +0000 UTC m=+146.755620679" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.790498 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.792076 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.292059144 +0000 UTC m=+147.267237725 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.817455 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" podStartSLOduration=125.817439362 podStartE2EDuration="2m5.817439362s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.814581987 +0000 UTC m=+146.789760568" watchObservedRunningTime="2025-10-07 14:51:09.817439362 +0000 UTC m=+146.792617963" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.835294 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-747wv" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.873518 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" podStartSLOduration=124.873492529 podStartE2EDuration="2m4.873492529s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.871390553 +0000 UTC m=+146.846569134" watchObservedRunningTime="2025-10-07 14:51:09.873492529 +0000 UTC m=+146.848671110" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.894148 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.897509 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.397493621 +0000 UTC m=+147.372672202 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.908323 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ljj62" podStartSLOduration=7.908305576 podStartE2EDuration="7.908305576s" podCreationTimestamp="2025-10-07 14:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.901459135 +0000 UTC m=+146.876637716" watchObservedRunningTime="2025-10-07 14:51:09.908305576 +0000 UTC m=+146.883484157" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.955868 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bkmq6" podStartSLOduration=125.955846628 podStartE2EDuration="2m5.955846628s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:09.954386689 +0000 UTC m=+146.929565290" watchObservedRunningTime="2025-10-07 14:51:09.955846628 +0000 UTC m=+146.931025209" Oct 07 14:51:09 crc kubenswrapper[4672]: I1007 14:51:09.997681 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:09 crc kubenswrapper[4672]: E1007 14:51:09.998138 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.498118671 +0000 UTC m=+147.473297262 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.023826 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" podStartSLOduration=125.023799227 podStartE2EDuration="2m5.023799227s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.014330318 +0000 UTC m=+146.989508919" watchObservedRunningTime="2025-10-07 14:51:10.023799227 +0000 UTC m=+146.998977808" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.035760 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.100060 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.100515 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.600497687 +0000 UTC m=+147.575676268 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.200487 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:10 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:10 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:10 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.200566 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.200890 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.201052 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.701010235 +0000 UTC m=+147.676188816 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.201199 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.201616 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.70160511 +0000 UTC m=+147.676783691 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.225877 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ft5w7" podStartSLOduration=126.225857719 podStartE2EDuration="2m6.225857719s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.224399721 +0000 UTC m=+147.199578302" watchObservedRunningTime="2025-10-07 14:51:10.225857719 +0000 UTC m=+147.201036300" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.226009 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t4nwf" podStartSLOduration=125.226003013 podStartE2EDuration="2m5.226003013s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.143348426 +0000 UTC m=+147.118527007" watchObservedRunningTime="2025-10-07 14:51:10.226003013 +0000 UTC m=+147.201181594" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.302343 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.302580 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.802551299 +0000 UTC m=+147.777729880 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.302985 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.303377 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.80336572 +0000 UTC m=+147.778544361 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.303599 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hqcjc" podStartSLOduration=126.303586816 podStartE2EDuration="2m6.303586816s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.300914786 +0000 UTC m=+147.276093387" watchObservedRunningTime="2025-10-07 14:51:10.303586816 +0000 UTC m=+147.278765397" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.359172 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vl5k6" podStartSLOduration=125.35915503 podStartE2EDuration="2m5.35915503s" podCreationTimestamp="2025-10-07 14:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.356680785 +0000 UTC m=+147.331859366" watchObservedRunningTime="2025-10-07 14:51:10.35915503 +0000 UTC m=+147.334333611" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.404209 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.404625 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:10.904599077 +0000 UTC m=+147.879777658 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.505579 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.506037 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.006003408 +0000 UTC m=+147.981182039 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.607057 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.607429 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.107411948 +0000 UTC m=+148.082590529 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.708816 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.709241 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.20922534 +0000 UTC m=+148.184403921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.782319 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" event={"ID":"684124fe-9cb9-4aa2-962d-9699e29f9ec5","Type":"ContainerStarted","Data":"58a435f16d26252154057f87fa4f029f4f4d0975f0dc0396bedf704f200bba40"} Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.783519 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" event={"ID":"c2ede9ea-5c2e-4deb-8729-159facdfae12","Type":"ContainerStarted","Data":"0229afeccc44ffd9689fb5d6298882444535724cccf839971982bf42dabda66a"} Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.784856 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" event={"ID":"ebc5b2fc-a77e-4c09-9c64-ca4d7d02dcc2","Type":"ContainerStarted","Data":"e65d6e542d6772f23d191e355a8546c49027cd5d9889d6e38049a7c001756640"} Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.785460 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.785599 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.786121 4672 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xcwwl container/packageserver 
namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body= Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.786216 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" podUID="45083805-b344-4b8d-8ab7-c90a7bcbebf2" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.787277 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.810760 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.810930 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.310904798 +0000 UTC m=+148.286083389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.811048 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.811365 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.3113571 +0000 UTC m=+148.286535681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.823966 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" podStartSLOduration=126.823948932 podStartE2EDuration="2m6.823948932s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.816989538 +0000 UTC m=+147.792168119" watchObservedRunningTime="2025-10-07 14:51:10.823948932 +0000 UTC m=+147.799127513" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.843722 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-62btl" podStartSLOduration=126.843696632 podStartE2EDuration="2m6.843696632s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:10.840811446 +0000 UTC m=+147.815990027" watchObservedRunningTime="2025-10-07 14:51:10.843696632 +0000 UTC m=+147.818875213" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.865804 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.866938 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.874877 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.907522 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.913581 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.913894 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.914108 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.914220 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.914269 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4r72\" (UniqueName: \"kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.914400 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.915063 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.915125 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:10 crc kubenswrapper[4672]: E1007 14:51:10.915895 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.415878573 +0000 UTC m=+148.391057154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.957238 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.958134 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.959472 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:10 crc kubenswrapper[4672]: I1007 14:51:10.970869 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.016305 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.016352 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.016374 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.016420 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4r72\" (UniqueName: \"kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.017046 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.517030327 +0000 UTC m=+148.492208918 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.017612 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.017829 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.068226 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.069612 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.088007 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.094139 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4r72\" (UniqueName: \"kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72\") pod \"certified-operators-qgmqj\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.104835 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.117617 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.117967 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmxr9\" (UniqueName: \"kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.118046 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.118098 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.118302 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.618281264 +0000 UTC m=+148.593459845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.124337 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.126112 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.132407 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.177904 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.179010 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.189098 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.189305 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.194223 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:11 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:11 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:11 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.194281 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219361 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219822 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219867 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmxr9\" (UniqueName: \"kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219889 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219919 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.219941 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.221044 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.221418 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.72140093 +0000 UTC m=+148.696579511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.223193 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.224756 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.285336 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.287991 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmxr9\" (UniqueName: \"kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9\") pod \"community-operators-9rvvc\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.288389 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.289713 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.320778 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.321129 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.321182 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.321211 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26v2k\" (UniqueName: \"kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.321244 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.321284 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.322165 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.822142983 +0000 UTC m=+148.797321564 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.322234 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.337463 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.414984 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.416444 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.425032 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.425094 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26v2k\" (UniqueName: \"kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.425128 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.425163 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.425717 4672 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:11.92570147 +0000 UTC m=+148.900880051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.426354 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.426915 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.486861 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26v2k\" (UniqueName: \"kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k\") pod \"certified-operators-nqwwk\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") " pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.496921 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hxvql"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.498156 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.525411 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.526311 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.526675 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd6b4\" (UniqueName: \"kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.526726 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.526776 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.526926 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.026907506 +0000 UTC m=+149.002086097 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.567899 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hxvql"] Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.634802 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.634924 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd6b4\" (UniqueName: \"kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.634981 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.635007 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.635494 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.635771 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.636394 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.136379339 +0000 UTC m=+149.111557920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.636681 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.702725 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd6b4\" (UniqueName: \"kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4\") pod \"community-operators-hxvql\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") " pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.756708 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.757340 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.257315264 +0000 UTC m=+149.232493845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.810269 4672 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-trnl6 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.810681 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" podUID="cd583d73-11c6-4955-b98f-e490f24a239e" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.821351 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.859646 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.864051 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.364032955 +0000 UTC m=+149.339211526 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:11 crc kubenswrapper[4672]: I1007 14:51:11.961528 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:11 crc kubenswrapper[4672]: E1007 14:51:11.962180 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.46216404 +0000 UTC m=+149.437342621 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.063950 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.064177 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.064603 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.564587977 +0000 UTC m=+149.539766558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.076187 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3bab8ff7-6484-479d-9423-0ce0c8f7beff-metrics-certs\") pod \"network-metrics-daemon-mfxdl\" (UID: \"3bab8ff7-6484-479d-9423-0ce0c8f7beff\") " pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.164930 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.165408 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.665386882 +0000 UTC m=+149.640565463 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.192308 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:12 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:12 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:12 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.192373 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.266200 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.266533 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.766522716 +0000 UTC m=+149.741701297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.320221 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mfxdl" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.370279 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.370640 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.870611957 +0000 UTC m=+149.845790538 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.475199 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.475649 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:12.975629623 +0000 UTC m=+149.950808204 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.576838 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.577817 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.077794674 +0000 UTC m=+150.052973265 (durationBeforeRetry 500ms). 
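
The failures above share one root cause: every MountDevice and TearDownAt attempt asks the kubelet for a CSI client for kubevirt.io.hostpath-provisioner, and the lookup fails because that driver's node plugin has not (yet) re-registered with the kubelet, so each operation is parked and retried. A minimal sketch of that lookup, with hypothetical type and field names (not kubelet source):

    package main

    import (
    	"fmt"
    	"sync"
    )

    // driverRegistry stands in for the kubelet's in-memory table of CSI node
    // plugins that have announced themselves over the plugin-registration socket.
    type driverRegistry struct {
    	mu      sync.RWMutex
    	drivers map[string]string // driver name -> node-plugin endpoint
    }

    // client looks a driver up by name, as every CSI mount/unmount must.
    func (r *driverRegistry) client(name string) (string, error) {
    	r.mu.RLock()
    	defer r.mu.RUnlock()
    	if ep, ok := r.drivers[name]; ok {
    		return ep, nil
    	}
    	// Mirrors the error text repeated in the log above.
    	return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
    }

    func main() {
    	reg := &driverRegistry{drivers: map[string]string{}} // nothing registered yet
    	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
    		fmt.Println("Error:", err) // the lookup failure the kubelet keeps reporting
    	}
    }

Once the provisioner's node plugin comes up and registers, the same parked operations would normally succeed on their next retry without any other change.
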
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.577849 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.603622 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.681555 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.681878 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.181865705 +0000 UTC m=+150.157044286 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.709030 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:51:12 crc kubenswrapper[4672]: W1007 14:51:12.740594 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac1d9066_0e72_4070_943d_fc2b01091892.slice/crio-58bb6691a73b5cb11a65926b51972cd7d9ba80a4923c690cdc3381df32b64fe0 WatchSource:0}: Error finding container 58bb6691a73b5cb11a65926b51972cd7d9ba80a4923c690cdc3381df32b64fe0: Status 404 returned error can't find the container with id 58bb6691a73b5cb11a65926b51972cd7d9ba80a4923c690cdc3381df32b64fe0 Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.744733 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hxvql"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.786478 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.786851 4672 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.28683381 +0000 UTC m=+150.262012391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.802155 4672 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xcwwl container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.802213 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" podUID="45083805-b344-4b8d-8ab7-c90a7bcbebf2" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.862079 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.862424 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.863733 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.883405 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.884050 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.907064 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.907105 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.907136 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.907176 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzr55\" (UniqueName: \"kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:12 crc kubenswrapper[4672]: E1007 14:51:12.907493 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.407478997 +0000 UTC m=+150.382657578 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.950315 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerStarted","Data":"d62084eca1be4a050da3f1f4121e7a57c41df852fe79908f5085ad3469875476"} Oct 07 14:51:12 crc kubenswrapper[4672]: I1007 14:51:12.996313 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerStarted","Data":"58bb6691a73b5cb11a65926b51972cd7d9ba80a4923c690cdc3381df32b64fe0"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.000757 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"64f3e1330ee08f14200ff84fddc1604b64e5249206b7ef5199825b9883e41c30"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.001825 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerStarted","Data":"743fb09a0b4afae1efdd4e9c23e08c0821a73ce0283518ceff3348c8aa9bf255"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.007573 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.007680 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.007710 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.007739 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzr55\" (UniqueName: \"kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.008363 4672 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.508303372 +0000 UTC m=+150.483481953 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.009197 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.009449 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.016222 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0015541000c7fc5347d3de0b61a104ea5ae775ed93ac8e73e0141179584ebd95"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.018515 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d24504e10ac1af22116a974a68973e2c362e90dc4b4c9f6c4b4fbbec4636a9e4"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.018578 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c80d3ff62a79b645acf0e298ed503fb8b2ffeaecad2355663c968400e612997a"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.045677 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzr55\" (UniqueName: \"kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55\") pod \"redhat-marketplace-zjdt6\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.055229 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98","Type":"ContainerStarted","Data":"b91e9102a3344ade54a8e9eb59a399a20e724ddd20b6fd1bae916bb3d39a24ae"} Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.109101 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.109390 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.609378924 +0000 UTC m=+150.584557505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.189914 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:13 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:13 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:13 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.190486 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.195584 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mfxdl"] Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.210271 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.71023763 +0000 UTC m=+150.685416211 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.210303 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.210636 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.211055 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.711047432 +0000 UTC m=+150.686226013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.245612 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.261682 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.262921 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.288638 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.322648 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.322891 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.322949 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkwjf\" (UniqueName: \"kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.323001 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.323161 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.823141384 +0000 UTC m=+150.798319965 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425129 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425195 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425252 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkwjf\" (UniqueName: \"kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425323 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.425490 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:13.925472339 +0000 UTC m=+150.900650970 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425808 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.425962 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.460712 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkwjf\" (UniqueName: \"kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf\") pod \"redhat-marketplace-7spfb\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.526112 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.526334 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.026305125 +0000 UTC m=+151.001483706 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.526484 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.526846 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.026831219 +0000 UTC m=+151.002009800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.591330 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.627966 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.628300 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.128285041 +0000 UTC m=+151.103463622 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.719289 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.732188 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.732609 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.232594508 +0000 UTC m=+151.207773089 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: W1007 14:51:13.740188 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podded2f33e_57fb_4fd2_9477_92de294be838.slice/crio-1abdb43779669cb2ce249134bcfd1d3af14622dd05828b05c8e35ceb278028cf WatchSource:0}: Error finding container 1abdb43779669cb2ce249134bcfd1d3af14622dd05828b05c8e35ceb278028cf: Status 404 returned error can't find the container with id 1abdb43779669cb2ce249134bcfd1d3af14622dd05828b05c8e35ceb278028cf Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.755747 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trnl6" Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.833364 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.833756 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.333731662 +0000 UTC m=+151.308910243 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.835525 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.835860 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.335846537 +0000 UTC m=+151.311025118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.882580 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:51:13 crc kubenswrapper[4672]: I1007 14:51:13.937324 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:13 crc kubenswrapper[4672]: E1007 14:51:13.937731 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.43771473 +0000 UTC m=+151.412893311 (durationBeforeRetry 500ms). 
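
Each failure above is parked by the kubelet's pending-operations table: the operation records an earliest-next-attempt time ("No retries permitted until ...") and the reconciler skips it until then. In this window the delay stays at 500ms; the growth factor and cap in the sketch below are illustrative assumptions about how such backoff is commonly bounded, not values observed in this log:

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextDelay grows the wait between attempts of one failing volume operation.
    // initial matches the 500ms seen in the log; factor and maxDelay are assumed.
    func nextDelay(last time.Duration) time.Duration {
    	const (
    		initial  = 500 * time.Millisecond
    		factor   = 2
    		maxDelay = 2*time.Minute + 2*time.Second
    	)
    	if last == 0 {
    		return initial
    	}
    	if d := last * factor; d < maxDelay {
    		return d
    	}
    	return maxDelay
    }

    func main() {
    	at := time.Date(2025, 10, 7, 14, 51, 12, 0, time.UTC)
    	var d time.Duration
    	for i := 0; i < 5; i++ {
    		d = nextDelay(d)
    		at = at.Add(d)
    		fmt.Printf("attempt %d: no retries permitted until %s (durationBeforeRetry %v)\n",
    			i+1, at.Format("2006-01-02 15:04:05.000 -0700 MST"), d)
    	}
    }
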
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.038908 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.039290 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.539273095 +0000 UTC m=+151.514451676 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.048284 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.049405 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.054582 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.067278 4672 generic.go:334] "Generic (PLEG): container finished" podID="f03b8179-5963-48e0-9f4c-88502776cab7" containerID="17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1" exitCode=0 Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.067485 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerDied","Data":"17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.069524 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.069968 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.072209 4672 generic.go:334] "Generic (PLEG): container finished" podID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerID="82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d" exitCode=0 Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.072287 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerDied","Data":"82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.072316 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerStarted","Data":"83c0fd239f5a49f9ebbfb4167a3f8b1fb415f924809980909576ab795c415cda"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.076101 4672 generic.go:334] "Generic (PLEG): container finished" podID="c3d69d29-e875-49dd-8929-40ac7f00470d" containerID="e41e74cb737b47e5643c30976f9543b4bea362733b6e8e52089a9dd49308b4ba" exitCode=0 Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.076162 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" event={"ID":"c3d69d29-e875-49dd-8929-40ac7f00470d","Type":"ContainerDied","Data":"e41e74cb737b47e5643c30976f9543b4bea362733b6e8e52089a9dd49308b4ba"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.084856 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" event={"ID":"3bab8ff7-6484-479d-9423-0ce0c8f7beff","Type":"ContainerStarted","Data":"efc3ec079d767a711bd50a08b2b09eb8b9d25d1f55c57176a62fa7cb4bea1a07"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.086602 4672 generic.go:334] "Generic (PLEG): container finished" podID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerID="5d37a364466d54098d9a9adb36cbe7ddbcf911bd429010ecab899878f09a766a" exitCode=0 Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.086662 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" 
event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerDied","Data":"5d37a364466d54098d9a9adb36cbe7ddbcf911bd429010ecab899878f09a766a"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.090354 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6ce202b0906805b364783d84bb58e3d9fbe38343a69a1aafe32a7f71ec664157"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.095092 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerStarted","Data":"675ef1fb149d2ab3748654affcda3b9ebf514b6714a0aff91cacc2183f46f970"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.099601 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerStarted","Data":"1abdb43779669cb2ce249134bcfd1d3af14622dd05828b05c8e35ceb278028cf"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.100987 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ec58e1e5f4c28b7f5e838a06f9c5ffbc24ce9532aa355870331819111695fe3a"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.101164 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.102437 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98","Type":"ContainerStarted","Data":"d34294354251715bf2e23ac5ddff654cd855316ac61371ca7ce4e0215deecb32"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.103792 4672 generic.go:334] "Generic (PLEG): container finished" podID="ac1d9066-0e72-4070-943d-fc2b01091892" containerID="8073ad86e626bd111269c0f5f2ef3a651ce4909f87aa16453416615b0e284634" exitCode=0 Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.104528 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerDied","Data":"8073ad86e626bd111269c0f5f2ef3a651ce4909f87aa16453416615b0e284634"} Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.140396 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.140576 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.640549883 +0000 UTC m=+151.615728464 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.140845 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.140942 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.140990 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fspz6\" (UniqueName: \"kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.141185 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.142467 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.642445222 +0000 UTC m=+151.617624003 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.181785 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.181761878 podStartE2EDuration="3.181761878s" podCreationTimestamp="2025-10-07 14:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:14.17919542 +0000 UTC m=+151.154374021" watchObservedRunningTime="2025-10-07 14:51:14.181761878 +0000 UTC m=+151.156940459" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.185859 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:14 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:14 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:14 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.185919 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243111 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243278 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.243391 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.743363751 +0000 UTC m=+151.718542342 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243490 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243578 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243644 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fspz6\" (UniqueName: \"kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.243749 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.244091 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.744077579 +0000 UTC m=+151.719256220 (durationBeforeRetry 500ms). 
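
The VerifyControllerAttachedVolume / MountVolume / "MountVolume.SetUp succeeded" sequences above are the kubelet volume manager reconciling desired state against actual state: each pass mounts whatever a pod wants that is not yet in the actual-state cache, so the empty-dir and projected volumes for the marketplace pods succeed immediately while only the CSI-backed PVC stays stuck. A simplified stand-in for that loop (types and names invented for illustration):

    package main

    import "fmt"

    // desiredVolume is a pod->volume pairing from the desired-state-of-world.
    type desiredVolume struct{ pod, name string }

    // mountFn stands in for a volume plugin's SetUp; real plugins can fail
    // (e.g. the CSI lookup failures in this log) and are then retried later.
    type mountFn func(v desiredVolume) error

    // reconcile mounts every desired volume not yet in the actual-state cache.
    func reconcile(desired []desiredVolume, actual map[string]bool, mount mountFn) {
    	for _, v := range desired {
    		key := v.pod + "/" + v.name
    		if actual[key] {
    			continue // already mounted; nothing to do
    		}
    		fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.name, v.pod)
    		if err := mount(v); err != nil {
    			fmt.Printf("MountVolume failed for %q: %v (will retry)\n", key, err)
    			continue // stays out of actual state, picked up next pass
    		}
    		actual[key] = true
    		fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.name, v.pod)
    	}
    }

    func main() {
    	actual := map[string]bool{}
    	reconcile([]desiredVolume{
    		{"redhat-operators-mrfk8", "utilities"},
    		{"redhat-operators-mrfk8", "catalog-content"},
    	}, actual, func(desiredVolume) error { return nil }) // empty-dirs mount trivially
    }
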
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.244258 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.292368 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fspz6\" (UniqueName: \"kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6\") pod \"redhat-operators-mrfk8\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.344789 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.346930 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.846902277 +0000 UTC m=+151.822080858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.372649 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.437914 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.447512 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.447875 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:14.947860056 +0000 UTC m=+151.923038637 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.462811 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"] Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.464056 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.480148 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"] Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.542434 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.542482 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.543165 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.543474 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.548839 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.550816 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-07 14:51:15.050785727 +0000 UTC m=+152.025964318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.552044 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.553343 4672 patch_prober.go:28] interesting pod/console-f9d7485db-5547l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.553382 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-5547l" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.553990 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.05393225 +0000 UTC m=+152.029110901 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.559414 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.648325 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-5fqpf" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.655309 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.655666 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.655882 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.155829264 +0000 UTC m=+152.131007855 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.656095 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.656231 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.656298 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f4rp\" (UniqueName: \"kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.656729 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.156710847 +0000 UTC m=+152.131889428 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.688569 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.756949 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.757287 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.757328 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f4rp\" (UniqueName: \"kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.757352 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.758180 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.258160509 +0000 UTC m=+152.233339090 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.759394 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.759947 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.788738 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f4rp\" (UniqueName: \"kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp\") pod \"redhat-operators-qlpls\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") " pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.823202 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qlpls" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.835739 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.860843 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.861608 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.361590413 +0000 UTC m=+152.336769094 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.880329 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.920259 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.920320 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.920268 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.920397 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.962208 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.962401 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.462372458 +0000 UTC m=+152.437551049 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:14 crc kubenswrapper[4672]: I1007 14:51:14.962619 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:14 crc kubenswrapper[4672]: E1007 14:51:14.962956 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.462946203 +0000 UTC m=+152.438124784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.058839 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"] Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.063550 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.063724 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.563700126 +0000 UTC m=+152.538878727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: W1007 14:51:15.091342 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf1fc2f5_50a9_4f47_8acc_b2495f3388ac.slice/crio-373048e6b16189e217ac60b8d79a1ba23768c0943baa89a41de960f53c701d96 WatchSource:0}: Error finding container 373048e6b16189e217ac60b8d79a1ba23768c0943baa89a41de960f53c701d96: Status 404 returned error can't find the container with id 373048e6b16189e217ac60b8d79a1ba23768c0943baa89a41de960f53c701d96 Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.110391 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.110435 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.124734 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerStarted","Data":"373048e6b16189e217ac60b8d79a1ba23768c0943baa89a41de960f53c701d96"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.126746 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" event={"ID":"3bab8ff7-6484-479d-9423-0ce0c8f7beff","Type":"ContainerStarted","Data":"f9213a01bd42bfd2ea864d6bea2d41e4145e735bd8a0b8cc288257e6952e78e4"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.126774 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mfxdl" event={"ID":"3bab8ff7-6484-479d-9423-0ce0c8f7beff","Type":"ContainerStarted","Data":"eb0494a11a197585df693640bb50df85432a2a713195cb1985f30838ac14b3f5"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.128810 4672 generic.go:334] "Generic (PLEG): container finished" podID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerID="1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a" exitCode=0 Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.128873 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerDied","Data":"1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.132540 4672 generic.go:334] "Generic (PLEG): container finished" podID="ded2f33e-57fb-4fd2-9477-92de294be838" containerID="107ea54990c87268f0c5d59eb31d54f083367cfd303131f700aca738831dafda" exitCode=0 Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.132763 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerDied","Data":"107ea54990c87268f0c5d59eb31d54f083367cfd303131f700aca738831dafda"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.138051 4672 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerStarted","Data":"cdd98578ab6e6fb0fd24a62242efd59927fd4a3d5ecb9987538fcf070be74942"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.138089 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerStarted","Data":"31abf446c96323b422291ac488b2e0b3f270386ee6078301df83e2eb529120df"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.146261 4672 generic.go:334] "Generic (PLEG): container finished" podID="1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" containerID="d34294354251715bf2e23ac5ddff654cd855316ac61371ca7ce4e0215deecb32" exitCode=0 Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.146873 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-mfxdl" podStartSLOduration=132.146858397 podStartE2EDuration="2m12.146858397s" podCreationTimestamp="2025-10-07 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:15.139702378 +0000 UTC m=+152.114880959" watchObservedRunningTime="2025-10-07 14:51:15.146858397 +0000 UTC m=+152.122036978" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.146947 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98","Type":"ContainerDied","Data":"d34294354251715bf2e23ac5ddff654cd855316ac61371ca7ce4e0215deecb32"} Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.156046 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wqndv" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.164998 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.165500 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.665482667 +0000 UTC m=+152.640661248 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.182329 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.186009 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:15 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:15 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:15 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.186278 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.266608 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.268414 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.768395538 +0000 UTC m=+152.743574109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.368975 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.369428 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.869410388 +0000 UTC m=+152.844588969 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.444342 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.470503 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume\") pod \"c3d69d29-e875-49dd-8929-40ac7f00470d\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.470622 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.470647 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume\") pod \"c3d69d29-e875-49dd-8929-40ac7f00470d\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.470674 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68kps\" (UniqueName: \"kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps\") pod \"c3d69d29-e875-49dd-8929-40ac7f00470d\" (UID: \"c3d69d29-e875-49dd-8929-40ac7f00470d\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.471782 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:15.971766304 +0000 UTC m=+152.946944875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.472393 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume" (OuterVolumeSpecName: "config-volume") pod "c3d69d29-e875-49dd-8929-40ac7f00470d" (UID: "c3d69d29-e875-49dd-8929-40ac7f00470d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.476277 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c3d69d29-e875-49dd-8929-40ac7f00470d" (UID: "c3d69d29-e875-49dd-8929-40ac7f00470d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.476308 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps" (OuterVolumeSpecName: "kube-api-access-68kps") pod "c3d69d29-e875-49dd-8929-40ac7f00470d" (UID: "c3d69d29-e875-49dd-8929-40ac7f00470d"). InnerVolumeSpecName "kube-api-access-68kps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.571888 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.571986 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3d69d29-e875-49dd-8929-40ac7f00470d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.572000 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3d69d29-e875-49dd-8929-40ac7f00470d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.572012 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68kps\" (UniqueName: \"kubernetes.io/projected/c3d69d29-e875-49dd-8929-40ac7f00470d-kube-api-access-68kps\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.572260 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.07225064 +0000 UTC m=+153.047429221 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.672797 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.673237 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.173208839 +0000 UTC m=+153.148387420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.674649 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.675002 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.174985846 +0000 UTC m=+153.150164427 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.731899 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xcwwl" Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.775307 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.775534 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.275507414 +0000 UTC m=+153.250685985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.775842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.776239 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.276224083 +0000 UTC m=+153.251402664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.876562 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.876948 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.376926215 +0000 UTC m=+153.352104806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:15 crc kubenswrapper[4672]: I1007 14:51:15.977620 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:15 crc kubenswrapper[4672]: E1007 14:51:15.977961 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.477950506 +0000 UTC m=+153.453129087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.078124 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.078306 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.578279568 +0000 UTC m=+153.553458149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.158722 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.158736 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf" event={"ID":"c3d69d29-e875-49dd-8929-40ac7f00470d","Type":"ContainerDied","Data":"35e43ae13f3745390f55b45967d09eb927f2314ee49c8c00c7b94044235b5f91"} Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.158772 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35e43ae13f3745390f55b45967d09eb927f2314ee49c8c00c7b94044235b5f91" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.171290 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" event={"ID":"c2ede9ea-5c2e-4deb-8729-159facdfae12","Type":"ContainerStarted","Data":"24b774af9c0b0cdfe7e08f6364088a61e726fb581e8aca458ad1923ef245d67f"} Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.172791 4672 generic.go:334] "Generic (PLEG): container finished" podID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerID="cdd98578ab6e6fb0fd24a62242efd59927fd4a3d5ecb9987538fcf070be74942" exitCode=0 Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.172850 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerDied","Data":"cdd98578ab6e6fb0fd24a62242efd59927fd4a3d5ecb9987538fcf070be74942"} Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.177316 4672 generic.go:334] "Generic (PLEG): container finished" podID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerID="e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e" exitCode=0 Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.178383 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerDied","Data":"e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e"} Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.179238 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.179576 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.679563396 +0000 UTC m=+153.654741967 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.200217 4672 patch_prober.go:28] interesting pod/apiserver-76f77b778f-bwrwj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]log ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]etcd ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/generic-apiserver-start-informers ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/max-in-flight-filter ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 07 14:51:16 crc kubenswrapper[4672]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 07 14:51:16 crc kubenswrapper[4672]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/project.openshift.io-projectcache ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/openshift.io-startinformers ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 07 14:51:16 crc kubenswrapper[4672]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 07 14:51:16 crc kubenswrapper[4672]: livez check failed Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.200530 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" podUID="684124fe-9cb9-4aa2-962d-9699e29f9ec5" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.207189 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:16 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:16 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:16 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.207246 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.285027 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.285128 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.785105705 +0000 UTC m=+153.760284306 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.285257 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.286595 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.786585444 +0000 UTC m=+153.761764025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.387997 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.388214 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.88818541 +0000 UTC m=+153.863363981 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.388907 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.389578 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.889558376 +0000 UTC m=+153.864736967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.490337 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.490678 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:16.990661359 +0000 UTC m=+153.965839940 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.591396 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.591827 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.091816263 +0000 UTC m=+154.066994844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.602796 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.619743 4672 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.691962 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir\") pod \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.692114 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access\") pod \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\" (UID: \"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98\") " Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.692242 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.692102 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" (UID: "1fbcdc1d-cd82-4a13-9a3f-1910ab438f98"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.692645 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.192583236 +0000 UTC m=+154.167761817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.698629 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" (UID: "1fbcdc1d-cd82-4a13-9a3f-1910ab438f98"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.795214 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.795613 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.29559801 +0000 UTC m=+154.270776591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.795654 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.795667 4672 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fbcdc1d-cd82-4a13-9a3f-1910ab438f98-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.896783 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.896974 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.396943339 +0000 UTC m=+154.372121930 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.897357 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:16 crc kubenswrapper[4672]: E1007 14:51:16.897762 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.3977424 +0000 UTC m=+154.372920981 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:16 crc kubenswrapper[4672]: I1007 14:51:16.999815 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:17 crc kubenswrapper[4672]: E1007 14:51:17.000110 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.500069455 +0000 UTC m=+154.475248046 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.000186 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:17 crc kubenswrapper[4672]: E1007 14:51:17.000616 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 14:51:17.500591609 +0000 UTC m=+154.475770210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sqktl" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.054359 4672 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-07T14:51:16.619770599Z","Handler":null,"Name":""} Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.089849 4672 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.089968 4672 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.100990 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.106159 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.194233 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:17 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:17 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:17 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.194302 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.210147 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.214968 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1fbcdc1d-cd82-4a13-9a3f-1910ab438f98","Type":"ContainerDied","Data":"b91e9102a3344ade54a8e9eb59a399a20e724ddd20b6fd1bae916bb3d39a24ae"} Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.215032 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b91e9102a3344ade54a8e9eb59a399a20e724ddd20b6fd1bae916bb3d39a24ae" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.215149 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.218222 4672 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.218248 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.222885 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" event={"ID":"c2ede9ea-5c2e-4deb-8729-159facdfae12","Type":"ContainerStarted","Data":"76f7284761be6987f27d8a8bfdd24724a94e9ce7552f7105fef2f45eacc04e8d"} Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.302277 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sqktl\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.413847 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ljj62" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.419201 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 14:51:17 crc kubenswrapper[4672]: E1007 14:51:17.419523 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" containerName="pruner" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.419546 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" containerName="pruner" Oct 07 14:51:17 crc kubenswrapper[4672]: E1007 14:51:17.419559 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3d69d29-e875-49dd-8929-40ac7f00470d" containerName="collect-profiles" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.419568 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3d69d29-e875-49dd-8929-40ac7f00470d" containerName="collect-profiles" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.419702 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fbcdc1d-cd82-4a13-9a3f-1910ab438f98" containerName="pruner" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.419720 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3d69d29-e875-49dd-8929-40ac7f00470d" containerName="collect-profiles" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.420178 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.421766 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.424824 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.425895 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.492552 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.514635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.514749 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.616430 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.616492 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.616579 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.636625 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.690500 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"] Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.752079 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:17 crc kubenswrapper[4672]: I1007 14:51:17.912196 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.194432 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:18 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:18 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:18 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.194493 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.220755 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.242820 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" event={"ID":"c2ede9ea-5c2e-4deb-8729-159facdfae12","Type":"ContainerStarted","Data":"2f086dca92d5100eec02ab98f21de855e7d7d420f050350e0bbceb9691192aa4"} Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.266593 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" event={"ID":"eed23ae9-82ee-4db9-b353-70033e17e039","Type":"ContainerStarted","Data":"ab8078d6ba50465d355e1bbb320b329dbe7103303c2a8eedb0c4194fdc0c0421"} Oct 07 14:51:18 crc kubenswrapper[4672]: I1007 14:51:18.284766 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-qpgsl" podStartSLOduration=16.284745351 podStartE2EDuration="16.284745351s" podCreationTimestamp="2025-10-07 14:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:18.281346541 +0000 UTC m=+155.256525152" watchObservedRunningTime="2025-10-07 14:51:18.284745351 +0000 UTC m=+155.259923932" Oct 07 14:51:19 crc kubenswrapper[4672]: I1007 14:51:19.186251 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:19 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:19 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:19 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:19 crc kubenswrapper[4672]: I1007 14:51:19.186528 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:19 crc kubenswrapper[4672]: I1007 14:51:19.273166 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fb375547-a365-4528-b4a4-98ad3819b58b","Type":"ContainerStarted","Data":"1eb9925d785bb5d6b9fdd96fcc8f853170a68b6f746f4e484d4df8138c6009b9"} Oct 07 14:51:19 crc kubenswrapper[4672]: I1007 14:51:19.274435 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" event={"ID":"eed23ae9-82ee-4db9-b353-70033e17e039","Type":"ContainerStarted","Data":"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833"} Oct 07 14:51:19 crc kubenswrapper[4672]: I1007 14:51:19.291770 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" podStartSLOduration=135.29175091 podStartE2EDuration="2m15.29175091s" podCreationTimestamp="2025-10-07 14:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:51:19.291098042 +0000 UTC m=+156.266276643" watchObservedRunningTime="2025-10-07 14:51:19.29175091 +0000 UTC m=+156.266929491" Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.114055 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.118550 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-bwrwj" Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.190007 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:20 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:20 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:20 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.190078 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.285973 4672 generic.go:334] "Generic (PLEG): container finished" podID="fb375547-a365-4528-b4a4-98ad3819b58b" containerID="42b3e6247077d0d876b3bccecf340ace3130a52d87de05366c33459a8af6413e" exitCode=0 Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.286080 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fb375547-a365-4528-b4a4-98ad3819b58b","Type":"ContainerDied","Data":"42b3e6247077d0d876b3bccecf340ace3130a52d87de05366c33459a8af6413e"} Oct 07 14:51:20 crc kubenswrapper[4672]: I1007 14:51:20.286797 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:21 crc kubenswrapper[4672]: I1007 14:51:21.186222 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:21 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:21 crc kubenswrapper[4672]: 
[+]process-running ok Oct 07 14:51:21 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:21 crc kubenswrapper[4672]: I1007 14:51:21.186290 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:22 crc kubenswrapper[4672]: I1007 14:51:22.185832 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:22 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:22 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:22 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:22 crc kubenswrapper[4672]: I1007 14:51:22.186197 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:23 crc kubenswrapper[4672]: I1007 14:51:23.185081 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:23 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:23 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:23 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:23 crc kubenswrapper[4672]: I1007 14:51:23.185153 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.185640 4672 patch_prober.go:28] interesting pod/router-default-5444994796-n8l22 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 14:51:24 crc kubenswrapper[4672]: [-]has-synced failed: reason withheld Oct 07 14:51:24 crc kubenswrapper[4672]: [+]process-running ok Oct 07 14:51:24 crc kubenswrapper[4672]: healthz check failed Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.185704 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n8l22" podUID="943fe211-1f41-4621-a164-678a5a38ec82" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.542604 4672 patch_prober.go:28] interesting pod/console-f9d7485db-5547l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.545652 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-5547l" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: 
connection refused" Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.920624 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.920666 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.921088 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:24 crc kubenswrapper[4672]: I1007 14:51:24.921026 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.191670 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.195251 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-n8l22" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.215935 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.319671 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir\") pod \"fb375547-a365-4528-b4a4-98ad3819b58b\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.319875 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access\") pod \"fb375547-a365-4528-b4a4-98ad3819b58b\" (UID: \"fb375547-a365-4528-b4a4-98ad3819b58b\") " Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.320965 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fb375547-a365-4528-b4a4-98ad3819b58b" (UID: "fb375547-a365-4528-b4a4-98ad3819b58b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.325929 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fb375547-a365-4528-b4a4-98ad3819b58b" (UID: "fb375547-a365-4528-b4a4-98ad3819b58b"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.333049 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.333042 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fb375547-a365-4528-b4a4-98ad3819b58b","Type":"ContainerDied","Data":"1eb9925d785bb5d6b9fdd96fcc8f853170a68b6f746f4e484d4df8138c6009b9"} Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.333096 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1eb9925d785bb5d6b9fdd96fcc8f853170a68b6f746f4e484d4df8138c6009b9" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.421425 4672 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb375547-a365-4528-b4a4-98ad3819b58b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:25 crc kubenswrapper[4672]: I1007 14:51:25.421472 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb375547-a365-4528-b4a4-98ad3819b58b-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 14:51:26 crc kubenswrapper[4672]: I1007 14:51:26.650396 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 14:51:26 crc kubenswrapper[4672]: I1007 14:51:26.650719 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.552620 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.556591 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-5547l" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.920204 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.920271 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.920297 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.920385 4672 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.920457 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.921396 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"ddcc403fb978c39b1054eb7493e2149ea79d7520a163354124eb561db27becd6"} pod="openshift-console/downloads-7954f5f757-rx4tl" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.921462 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.921544 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:34 crc kubenswrapper[4672]: I1007 14:51:34.921511 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" containerID="cri-o://ddcc403fb978c39b1054eb7493e2149ea79d7520a163354124eb561db27becd6" gracePeriod=2 Oct 07 14:51:36 crc kubenswrapper[4672]: I1007 14:51:36.411748 4672 generic.go:334] "Generic (PLEG): container finished" podID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerID="ddcc403fb978c39b1054eb7493e2149ea79d7520a163354124eb561db27becd6" exitCode=0 Oct 07 14:51:36 crc kubenswrapper[4672]: I1007 14:51:36.411854 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rx4tl" event={"ID":"e3d90d2e-7f97-400b-a29b-7bf63a75b43c","Type":"ContainerDied","Data":"ddcc403fb978c39b1054eb7493e2149ea79d7520a163354124eb561db27becd6"} Oct 07 14:51:37 crc kubenswrapper[4672]: I1007 14:51:37.498943 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:51:40 crc kubenswrapper[4672]: E1007 14:51:40.341708 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 14:51:40 crc kubenswrapper[4672]: E1007 14:51:40.342248 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-26v2k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nqwwk_openshift-marketplace(0b738796-9990-4a2d-b5ce-e86e79a7da40): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 14:51:40 crc kubenswrapper[4672]: E1007 14:51:40.345198 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nqwwk" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" Oct 07 14:51:40 crc kubenswrapper[4672]: E1007 14:51:40.436043 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nqwwk" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" Oct 07 14:51:41 crc kubenswrapper[4672]: E1007 14:51:41.022836 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage3548678195/3\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 14:51:41 crc kubenswrapper[4672]: E1007 14:51:41.023047 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzr55,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zjdt6_openshift-marketplace(ded2f33e-57fb-4fd2-9477-92de294be838): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage3548678195/3\": happened during read: context canceled" logger="UnhandledError" Oct 07 14:51:41 crc kubenswrapper[4672]: E1007 14:51:41.024249 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage3548678195/3\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zjdt6" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" Oct 07 14:51:41 crc kubenswrapper[4672]: I1007 14:51:41.447459 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rx4tl" event={"ID":"e3d90d2e-7f97-400b-a29b-7bf63a75b43c","Type":"ContainerStarted","Data":"21b7b10ab7e0010c6d677597e6e79922e7bb5553794d8c06af11030c197763a4"} Oct 07 14:51:41 crc kubenswrapper[4672]: I1007 14:51:41.451204 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:41 crc kubenswrapper[4672]: I1007 14:51:41.451273 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:41 crc kubenswrapper[4672]: E1007 14:51:41.451583 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zjdt6" 
podUID="ded2f33e-57fb-4fd2-9477-92de294be838" Oct 07 14:51:42 crc kubenswrapper[4672]: I1007 14:51:42.453293 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rx4tl" Oct 07 14:51:42 crc kubenswrapper[4672]: I1007 14:51:42.453754 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:42 crc kubenswrapper[4672]: I1007 14:51:42.453833 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:43 crc kubenswrapper[4672]: I1007 14:51:43.458137 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:43 crc kubenswrapper[4672]: I1007 14:51:43.458192 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:44 crc kubenswrapper[4672]: I1007 14:51:44.919992 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:44 crc kubenswrapper[4672]: I1007 14:51:44.920166 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Oct 07 14:51:44 crc kubenswrapper[4672]: I1007 14:51:44.920405 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:44 crc kubenswrapper[4672]: I1007 14:51:44.920438 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.320667 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.321011 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dd6b4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hxvql_openshift-marketplace(f03b8179-5963-48e0-9f4c-88502776cab7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.322212 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hxvql" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.363221 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.363403 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bmxr9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9rvvc_openshift-marketplace(4d331851-6416-4fa6-965c-f20fa52b7d32): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 14:51:45 crc kubenswrapper[4672]: E1007 14:51:45.364748 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9rvvc" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" Oct 07 14:51:45 crc kubenswrapper[4672]: I1007 14:51:45.643805 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qv6g9" Oct 07 14:51:49 crc kubenswrapper[4672]: E1007 14:51:49.612425 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9rvvc" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" Oct 07 14:51:49 crc kubenswrapper[4672]: E1007 14:51:49.612845 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hxvql" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" Oct 07 14:51:51 crc kubenswrapper[4672]: I1007 14:51:51.155982 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 14:51:54 crc kubenswrapper[4672]: E1007 14:51:54.636554 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 07 14:51:54 crc kubenswrapper[4672]: E1007 14:51:54.636905 4672 
Oct 07 14:51:54 crc kubenswrapper[4672]: E1007 14:51:54.636905 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fspz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mrfk8_openshift-marketplace(42794156-02f5-474c-8fa3-c13eb4db08b0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 07 14:51:54 crc kubenswrapper[4672]: E1007 14:51:54.638125 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mrfk8" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0"
Oct 07 14:51:54 crc kubenswrapper[4672]: I1007 14:51:54.920505 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body=
Oct 07 14:51:54 crc kubenswrapper[4672]: I1007 14:51:54.920569 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused"
Oct 07 14:51:54 crc kubenswrapper[4672]: I1007 14:51:54.920511 4672 patch_prober.go:28] interesting pod/downloads-7954f5f757-rx4tl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body=
Oct 07 14:51:54 crc kubenswrapper[4672]: I1007 14:51:54.920668 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rx4tl" podUID="e3d90d2e-7f97-400b-a29b-7bf63a75b43c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused"
Oct 07 14:51:55 crc kubenswrapper[4672]: E1007 14:51:55.469897 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 07 14:51:55 crc kubenswrapper[4672]: E1007 14:51:55.470086 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8f4rp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qlpls_openshift-marketplace(cf1fc2f5-50a9-4f47-8acc-b2495f3388ac): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 07 14:51:55 crc kubenswrapper[4672]: E1007 14:51:55.471275 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qlpls" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac"
Oct 07 14:51:56 crc kubenswrapper[4672]: I1007 14:51:56.650695 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 14:51:56 crc kubenswrapper[4672]: I1007 14:51:56.650767 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 14:52:04 crc kubenswrapper[4672]: I1007 14:52:04.925092 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-rx4tl"
Oct 07 14:52:14 crc kubenswrapper[4672]: I1007 14:52:14.618235 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerStarted","Data":"f4d892f0b5dca8d10fc14ef634c59c0228120defddb9b8eda01818580d9aef05"}
Oct 07 14:52:15 crc kubenswrapper[4672]: I1007 14:52:15.626791 4672 generic.go:334] "Generic (PLEG): container finished" podID="ac1d9066-0e72-4070-943d-fc2b01091892" containerID="f4d892f0b5dca8d10fc14ef634c59c0228120defddb9b8eda01818580d9aef05" exitCode=0
Oct 07 14:52:15 crc kubenswrapper[4672]: I1007 14:52:15.626914 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerDied","Data":"f4d892f0b5dca8d10fc14ef634c59c0228120defddb9b8eda01818580d9aef05"}
Oct 07 14:52:16 crc kubenswrapper[4672]: E1007 14:52:16.120042 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 07 14:52:16 crc kubenswrapper[4672]: E1007 14:52:16.120345 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qkwjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7spfb_openshift-marketplace(c36536f1-8c7b-44ba-9573-04f1d3f9ed3c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 07 14:52:16 crc kubenswrapper[4672]: E1007 14:52:16.121941 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7spfb" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c"
Oct 07 14:52:16 crc kubenswrapper[4672]: E1007 14:52:16.634107 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7spfb" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c"
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.640975 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerStarted","Data":"a81644f5f9e3368852f38eb30ba5c2df58e882a3742898baef9c50b28e9f2222"}
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.642366 4672 generic.go:334] "Generic (PLEG): container finished" podID="ded2f33e-57fb-4fd2-9477-92de294be838" containerID="9932851aea50dd4bca2db7a88463b05445a97b0f628cc1c0db92b970b212ca76" exitCode=0
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.642429 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerDied","Data":"9932851aea50dd4bca2db7a88463b05445a97b0f628cc1c0db92b970b212ca76"}
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.644318 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerStarted","Data":"55872ed8e8eed01caf5f4114fb28e29a4ffcb7938f537830c746845e6520da6f"}
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.646904 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerStarted","Data":"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4"}
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.648800 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerStarted","Data":"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"}
Oct 07 14:52:17 crc kubenswrapper[4672]: I1007 14:52:17.653693 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerStarted","Data":"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.660111 4672 generic.go:334] "Generic (PLEG): container finished" podID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerID="55872ed8e8eed01caf5f4114fb28e29a4ffcb7938f537830c746845e6520da6f" exitCode=0
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.660195 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerDied","Data":"55872ed8e8eed01caf5f4114fb28e29a4ffcb7938f537830c746845e6520da6f"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.662203 4672 generic.go:334] "Generic (PLEG): container finished" podID="f03b8179-5963-48e0-9f4c-88502776cab7" containerID="5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4" exitCode=0
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.662248 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerDied","Data":"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.665156 4672 generic.go:334] "Generic (PLEG): container finished" podID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerID="7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e" exitCode=0
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.665715 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerDied","Data":"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.667943 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerStarted","Data":"2837aa6bbae037780f052ba3b803d3be4645a1491b7c8ee943a74bf8eeb1af88"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.672759 4672 generic.go:334] "Generic (PLEG): container finished" podID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerID="57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3" exitCode=0
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.672834 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerDied","Data":"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.676169 4672 generic.go:334] "Generic (PLEG): container finished" podID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerID="a81644f5f9e3368852f38eb30ba5c2df58e882a3742898baef9c50b28e9f2222" exitCode=0
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.676234 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerDied","Data":"a81644f5f9e3368852f38eb30ba5c2df58e882a3742898baef9c50b28e9f2222"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.681344 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerStarted","Data":"e3b0171c4b475385dd42c53cc99595e4dd7b3e673d84f6bf388d843477ba3e37"}
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.714470 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qgmqj" podStartSLOduration=5.047633798 podStartE2EDuration="1m8.714449668s" podCreationTimestamp="2025-10-07 14:51:10 +0000 UTC" firstStartedPulling="2025-10-07 14:51:14.104968175 +0000 UTC m=+151.080146756" lastFinishedPulling="2025-10-07 14:52:17.771784055 +0000 UTC m=+214.746962626" observedRunningTime="2025-10-07 14:52:18.697779443 +0000 UTC m=+215.672958014" watchObservedRunningTime="2025-10-07 14:52:18.714449668 +0000 UTC m=+215.689628249"
Oct 07 14:52:18 crc kubenswrapper[4672]: I1007 14:52:18.794545 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zjdt6" podStartSLOduration=3.826060906 podStartE2EDuration="1m6.794523466s" podCreationTimestamp="2025-10-07 14:51:12 +0000 UTC" firstStartedPulling="2025-10-07 14:51:15.135415355 +0000 UTC m=+152.110593936" lastFinishedPulling="2025-10-07 14:52:18.103877915 +0000 UTC m=+215.079056496" observedRunningTime="2025-10-07 14:52:18.791146724 +0000 UTC m=+215.766325305" watchObservedRunningTime="2025-10-07 14:52:18.794523466 +0000 UTC m=+215.769702037"
Oct 07 14:52:19 crc kubenswrapper[4672]: I1007 14:52:19.688788 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerStarted","Data":"3b54722597279a1ca129a842853f32d28761ccdeb9b74f8a951c875786ed9852"}
Oct 07 14:52:19 crc kubenswrapper[4672]: I1007 14:52:19.693265 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerStarted","Data":"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c"}
Oct 07 14:52:19 crc kubenswrapper[4672]: I1007 14:52:19.706790 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9rvvc" podStartSLOduration=3.362579681 podStartE2EDuration="1m8.706775398s" podCreationTimestamp="2025-10-07 14:51:11 +0000 UTC" firstStartedPulling="2025-10-07 14:51:14.088777819 +0000 UTC m=+151.063956400" lastFinishedPulling="2025-10-07 14:52:19.432973536 +0000 UTC m=+216.408152117" observedRunningTime="2025-10-07 14:52:19.70419592 +0000 UTC m=+216.679374511" watchObservedRunningTime="2025-10-07 14:52:19.706775398 +0000 UTC m=+216.681953979"
Oct 07 14:52:19 crc kubenswrapper[4672]: I1007 14:52:19.735486 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nqwwk" podStartSLOduration=3.348461343 podStartE2EDuration="1m8.735467118s" podCreationTimestamp="2025-10-07 14:51:11 +0000 UTC" firstStartedPulling="2025-10-07 14:51:14.073916458 +0000 UTC m=+151.049095039" lastFinishedPulling="2025-10-07 14:52:19.460922233 +0000 UTC m=+216.436100814" observedRunningTime="2025-10-07 14:52:19.733853259 +0000 UTC m=+216.709031870" watchObservedRunningTime="2025-10-07 14:52:19.735467118 +0000 UTC m=+216.710645699"
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.700010 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerStarted","Data":"d647eea38bb3645a36a9994cbd7cac7a8562b99d3d2fd71c44f7167d3bca025b"}
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.702943 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerStarted","Data":"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc"}
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.704951 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerStarted","Data":"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"}
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.720231 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mrfk8" podStartSLOduration=3.330951518 podStartE2EDuration="1m6.720194927s" podCreationTimestamp="2025-10-07 14:51:14 +0000 UTC" firstStartedPulling="2025-10-07 14:51:16.179578556 +0000 UTC m=+153.154757137" lastFinishedPulling="2025-10-07 14:52:19.568821965 +0000 UTC m=+216.544000546" observedRunningTime="2025-10-07 14:52:20.718600968 +0000 UTC m=+217.693779549" watchObservedRunningTime="2025-10-07 14:52:20.720194927 +0000 UTC m=+217.695373498"
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.736802 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qlpls" podStartSLOduration=3.2357496550000002 podStartE2EDuration="1m6.73678386s" podCreationTimestamp="2025-10-07 14:51:14 +0000 UTC" firstStartedPulling="2025-10-07 14:51:16.181168858 +0000 UTC m=+153.156347439" lastFinishedPulling="2025-10-07 14:52:19.682203073 +0000 UTC m=+216.657381644" observedRunningTime="2025-10-07 14:52:20.735430819 +0000 UTC m=+217.710609410" watchObservedRunningTime="2025-10-07 14:52:20.73678386 +0000 UTC m=+217.711962441"
Oct 07 14:52:20 crc kubenswrapper[4672]: I1007 14:52:20.758153 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hxvql" podStartSLOduration=3.692494132 podStartE2EDuration="1m9.758135387s" podCreationTimestamp="2025-10-07 14:51:11 +0000 UTC" firstStartedPulling="2025-10-07 14:51:14.069279515 +0000 UTC m=+151.044458096" lastFinishedPulling="2025-10-07 14:52:20.13492077 +0000 UTC m=+217.110099351" observedRunningTime="2025-10-07 14:52:20.754695243 +0000 UTC m=+217.729873834" watchObservedRunningTime="2025-10-07 14:52:20.758135387 +0000 UTC m=+217.733313968"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.286224 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qgmqj"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.287240 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qgmqj"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.417981 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9rvvc"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.418292 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9rvvc"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.468836 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qgmqj"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.637646 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nqwwk"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.637703 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nqwwk"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.678390 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nqwwk"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.822489 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hxvql"
Oct 07 14:52:21 crc kubenswrapper[4672]: I1007 14:52:21.822800 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hxvql"
Oct 07 14:52:22 crc kubenswrapper[4672]: I1007 14:52:22.453123 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-9rvvc" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="registry-server" probeResult="failure" output=<
Oct 07 14:52:22 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s
Oct 07 14:52:22 crc kubenswrapper[4672]: >
Oct 07 14:52:22 crc kubenswrapper[4672]: I1007 14:52:22.753833 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qgmqj"
Oct 07 14:52:22 crc kubenswrapper[4672]: I1007 14:52:22.869202 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hxvql" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="registry-server" probeResult="failure" output=<
Oct 07 14:52:22 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s
Oct 07 14:52:22 crc kubenswrapper[4672]: >
Oct 07 14:52:23 crc kubenswrapper[4672]: I1007 14:52:23.246507 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zjdt6"
Oct 07 14:52:23 crc kubenswrapper[4672]: I1007 14:52:23.246581 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zjdt6"
Oct 07 14:52:23 crc kubenswrapper[4672]: I1007 14:52:23.284160 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zjdt6"
Oct 07 14:52:23 crc kubenswrapper[4672]: I1007 14:52:23.762300 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zjdt6"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.373743 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mrfk8"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.374102 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mrfk8"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.412230 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mrfk8"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.761530 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mrfk8"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.824086 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.824368 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:24 crc kubenswrapper[4672]: I1007 14:52:24.857993 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:25 crc kubenswrapper[4672]: I1007 14:52:25.764410 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:26 crc kubenswrapper[4672]: I1007 14:52:26.651120 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 14:52:26 crc kubenswrapper[4672]: I1007 14:52:26.651197 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 14:52:26 crc kubenswrapper[4672]: I1007 14:52:26.651259 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj"
Oct 07 14:52:26 crc kubenswrapper[4672]: I1007 14:52:26.652985 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 14:52:26 crc kubenswrapper[4672]: I1007 14:52:26.654632 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d" gracePeriod=600
Oct 07 14:52:27 crc kubenswrapper[4672]: I1007 14:52:27.764373 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d" exitCode=0
Oct 07 14:52:27 crc kubenswrapper[4672]: I1007 14:52:27.764710 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d"}
Oct 07 14:52:27 crc kubenswrapper[4672]: I1007 14:52:27.764921 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f"}
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.082615 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"]
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.083280 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qlpls" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="registry-server" containerID="cri-o://b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18" gracePeriod=2
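
The probe output `timeout: failed to connect service ":50051" within 1s` comes from the registry-server containers' startup probe, a gRPC health check against port 50051, and the machine-config-daemon entries show the other half of the probe machinery: a failed liveness check gets the container killed with its grace period (gracePeriod=600 here) and restarted. A minimal sketch of such a gRPC health check using the standard grpc_health_v1 service (hypothetical client assuming a recent grpc-go with grpc.NewClient; not the exact probe binary the pod runs):

package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

// checkGRPC asks the standard gRPC health service at addr whether the server
// is SERVING, failing after the same 1s budget seen in the log.
func checkGRPC(addr string) error {
	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
	defer cancel()
	conn, err := grpc.NewClient(addr, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return err
	}
	defer conn.Close()
	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	if err != nil {
		return fmt.Errorf("failed to connect service %q within 1s: %w", addr, err)
	}
	if resp.GetStatus() != healthpb.HealthCheckResponse_SERVING {
		return fmt.Errorf("service %q not serving: %s", addr, resp.GetStatus())
	}
	return nil
}

func main() {
	if err := checkGRPC("localhost:50051"); err != nil {
		fmt.Println("Startup probe failed:", err)
	}
}
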
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.538217 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.654063 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities\") pod \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") "
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.654548 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content\") pod \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") "
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.654616 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f4rp\" (UniqueName: \"kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp\") pod \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\" (UID: \"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac\") "
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.655446 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities" (OuterVolumeSpecName: "utilities") pod "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" (UID: "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.665651 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp" (OuterVolumeSpecName: "kube-api-access-8f4rp") pod "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" (UID: "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac"). InnerVolumeSpecName "kube-api-access-8f4rp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.747821 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" (UID: "cf1fc2f5-50a9-4f47-8acc-b2495f3388ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.756052 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.756100 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f4rp\" (UniqueName: \"kubernetes.io/projected/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-kube-api-access-8f4rp\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.756114 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.778062 4672 generic.go:334] "Generic (PLEG): container finished" podID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerID="b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18" exitCode=0
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.778146 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerDied","Data":"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"}
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.778175 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qlpls" event={"ID":"cf1fc2f5-50a9-4f47-8acc-b2495f3388ac","Type":"ContainerDied","Data":"373048e6b16189e217ac60b8d79a1ba23768c0943baa89a41de960f53c701d96"}
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.778188 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qlpls"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.778197 4672 scope.go:117] "RemoveContainer" containerID="b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.787636 4672 generic.go:334] "Generic (PLEG): container finished" podID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerID="983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7" exitCode=0
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.787716 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerDied","Data":"983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7"}
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.816370 4672 scope.go:117] "RemoveContainer" containerID="7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.834885 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"]
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.842865 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qlpls"]
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.843355 4672 scope.go:117] "RemoveContainer" containerID="e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.861648 4672 scope.go:117] "RemoveContainer" containerID="b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"
Oct 07 14:52:29 crc kubenswrapper[4672]: E1007 14:52:29.862167 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18\": container with ID starting with b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18 not found: ID does not exist" containerID="b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.862243 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18"} err="failed to get container status \"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18\": rpc error: code = NotFound desc = could not find container \"b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18\": container with ID starting with b267d658d83b7f013bfed8b9ba220694b5b61d97c42739e84ef1e45cbe6a7a18 not found: ID does not exist"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.862270 4672 scope.go:117] "RemoveContainer" containerID="7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"
Oct 07 14:52:29 crc kubenswrapper[4672]: E1007 14:52:29.862675 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e\": container with ID starting with 7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e not found: ID does not exist" containerID="7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.862753 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e"} err="failed to get container status \"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e\": rpc error: code = NotFound desc = could not find container \"7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e\": container with ID starting with 7139f40850fded8edb87e1beb500f27ac442b358c8eca18484ddace905bc031e not found: ID does not exist"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.862796 4672 scope.go:117] "RemoveContainer" containerID="e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e"
Oct 07 14:52:29 crc kubenswrapper[4672]: E1007 14:52:29.863185 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e\": container with ID starting with e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e not found: ID does not exist" containerID="e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.863217 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e"} err="failed to get container status \"e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e\": rpc error: code = NotFound desc = could not find container \"e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e\": container with ID starting with e02e367a02099879400cdc738786022a17a64814c57f07f3bd72a30b15cb894e not found: ID does not exist"
Oct 07 14:52:29 crc kubenswrapper[4672]: I1007 14:52:29.899479 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" path="/var/lib/kubelet/pods/cf1fc2f5-50a9-4f47-8acc-b2495f3388ac/volumes"
Oct 07 14:52:30 crc kubenswrapper[4672]: I1007 14:52:30.806803 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerStarted","Data":"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377"}
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.458655 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9rvvc"
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.476443 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7spfb" podStartSLOduration=3.197027984 podStartE2EDuration="1m18.476427885s" podCreationTimestamp="2025-10-07 14:51:13 +0000 UTC" firstStartedPulling="2025-10-07 14:51:15.133161716 +0000 UTC m=+152.108340297" lastFinishedPulling="2025-10-07 14:52:30.412561617 +0000 UTC m=+227.387740198" observedRunningTime="2025-10-07 14:52:30.829910661 +0000 UTC m=+227.805089232" watchObservedRunningTime="2025-10-07 14:52:31.476427885 +0000 UTC m=+228.451606466"
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.499683 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9rvvc"
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.681405 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nqwwk"
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.878972 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hxvql"
Oct 07 14:52:31 crc kubenswrapper[4672]: I1007 14:52:31.942756 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hxvql"
Oct 07 14:52:33 crc kubenswrapper[4672]: I1007 14:52:33.593118 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7spfb"
Oct 07 14:52:33 crc kubenswrapper[4672]: I1007 14:52:33.593156 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7spfb"
Oct 07 14:52:33 crc kubenswrapper[4672]: I1007 14:52:33.629088 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7spfb"
Oct 07 14:52:34 crc kubenswrapper[4672]: I1007 14:52:34.879348 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"]
Oct 07 14:52:34 crc kubenswrapper[4672]: I1007 14:52:34.879641 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nqwwk" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="registry-server" containerID="cri-o://d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c" gracePeriod=2
Oct 07 14:52:35 crc kubenswrapper[4672]: I1007 14:52:35.483594 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hxvql"]
Oct 07 14:52:35 crc kubenswrapper[4672]: I1007 14:52:35.484251 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hxvql" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="registry-server" containerID="cri-o://600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc" gracePeriod=2
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.551771 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nqwwk"
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.566761 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hxvql"
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641436 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26v2k\" (UniqueName: \"kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k\") pod \"0b738796-9990-4a2d-b5ce-e86e79a7da40\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641816 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities\") pod \"0b738796-9990-4a2d-b5ce-e86e79a7da40\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641889 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities\") pod \"f03b8179-5963-48e0-9f4c-88502776cab7\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641918 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content\") pod \"f03b8179-5963-48e0-9f4c-88502776cab7\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641945 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd6b4\" (UniqueName: \"kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4\") pod \"f03b8179-5963-48e0-9f4c-88502776cab7\" (UID: \"f03b8179-5963-48e0-9f4c-88502776cab7\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.641975 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content\") pod \"0b738796-9990-4a2d-b5ce-e86e79a7da40\" (UID: \"0b738796-9990-4a2d-b5ce-e86e79a7da40\") "
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.643008 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities" (OuterVolumeSpecName: "utilities") pod "f03b8179-5963-48e0-9f4c-88502776cab7" (UID: "f03b8179-5963-48e0-9f4c-88502776cab7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.643250 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities" (OuterVolumeSpecName: "utilities") pod "0b738796-9990-4a2d-b5ce-e86e79a7da40" (UID: "0b738796-9990-4a2d-b5ce-e86e79a7da40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.651489 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k" (OuterVolumeSpecName: "kube-api-access-26v2k") pod "0b738796-9990-4a2d-b5ce-e86e79a7da40" (UID: "0b738796-9990-4a2d-b5ce-e86e79a7da40"). InnerVolumeSpecName "kube-api-access-26v2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.651489 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4" (OuterVolumeSpecName: "kube-api-access-dd6b4") pod "f03b8179-5963-48e0-9f4c-88502776cab7" (UID: "f03b8179-5963-48e0-9f4c-88502776cab7"). InnerVolumeSpecName "kube-api-access-dd6b4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.699944 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b738796-9990-4a2d-b5ce-e86e79a7da40" (UID: "0b738796-9990-4a2d-b5ce-e86e79a7da40"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.711430 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f03b8179-5963-48e0-9f4c-88502776cab7" (UID: "f03b8179-5963-48e0-9f4c-88502776cab7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.743645 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd6b4\" (UniqueName: \"kubernetes.io/projected/f03b8179-5963-48e0-9f4c-88502776cab7-kube-api-access-dd6b4\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.744213 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.744415 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26v2k\" (UniqueName: \"kubernetes.io/projected/0b738796-9990-4a2d-b5ce-e86e79a7da40-kube-api-access-26v2k\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.744569 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b738796-9990-4a2d-b5ce-e86e79a7da40-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.744786 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.744996 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f03b8179-5963-48e0-9f4c-88502776cab7-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.840983 4672 generic.go:334] "Generic (PLEG): container finished" podID="f03b8179-5963-48e0-9f4c-88502776cab7" containerID="600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc" exitCode=0
event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerDied","Data":"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc"} Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.841133 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxvql" event={"ID":"f03b8179-5963-48e0-9f4c-88502776cab7","Type":"ContainerDied","Data":"d62084eca1be4a050da3f1f4121e7a57c41df852fe79908f5085ad3469875476"} Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.841149 4672 scope.go:117] "RemoveContainer" containerID="600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.841619 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hxvql" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.844455 4672 generic.go:334] "Generic (PLEG): container finished" podID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerID="d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c" exitCode=0 Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.844514 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerDied","Data":"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c"} Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.844560 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nqwwk" event={"ID":"0b738796-9990-4a2d-b5ce-e86e79a7da40","Type":"ContainerDied","Data":"83c0fd239f5a49f9ebbfb4167a3f8b1fb415f924809980909576ab795c415cda"} Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.844591 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nqwwk" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.862748 4672 scope.go:117] "RemoveContainer" containerID="5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.879374 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hxvql"] Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.882835 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hxvql"] Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.886103 4672 scope.go:117] "RemoveContainer" containerID="17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.900188 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"] Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.902520 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nqwwk"] Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.905986 4672 scope.go:117] "RemoveContainer" containerID="600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.906703 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc\": container with ID starting with 600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc not found: ID does not exist" containerID="600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.906749 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc"} err="failed to get container status \"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc\": rpc error: code = NotFound desc = could not find container \"600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc\": container with ID starting with 600352fa4db699b5f1abc113e91f20989114bfd78f31cea54f9f67134a90bbbc not found: ID does not exist" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.906781 4672 scope.go:117] "RemoveContainer" containerID="5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.907384 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4\": container with ID starting with 5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4 not found: ID does not exist" containerID="5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.907432 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4"} err="failed to get container status \"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4\": rpc error: code = NotFound desc = could not find container \"5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4\": container with ID starting with 
5a987a7d88cc7fb7e4c67f70dd0d40fbdec38129fb1d9b075aec1c88f13a32e4 not found: ID does not exist" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.907473 4672 scope.go:117] "RemoveContainer" containerID="17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.907875 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1\": container with ID starting with 17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1 not found: ID does not exist" containerID="17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.907900 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1"} err="failed to get container status \"17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1\": rpc error: code = NotFound desc = could not find container \"17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1\": container with ID starting with 17996097af5957ad02229c25dc78a440fbe965f226b5f86fd40278f5b91a55a1 not found: ID does not exist" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.907918 4672 scope.go:117] "RemoveContainer" containerID="d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.920417 4672 scope.go:117] "RemoveContainer" containerID="57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.934877 4672 scope.go:117] "RemoveContainer" containerID="82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.947600 4672 scope.go:117] "RemoveContainer" containerID="d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.948051 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c\": container with ID starting with d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c not found: ID does not exist" containerID="d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.948085 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c"} err="failed to get container status \"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c\": rpc error: code = NotFound desc = could not find container \"d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c\": container with ID starting with d68d1f007f06bb819ea74c0ed685b10fe74b3248f9c2be0ff34c94ba2ec51b6c not found: ID does not exist" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.948107 4672 scope.go:117] "RemoveContainer" containerID="57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.948564 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3\": container 
with ID starting with 57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3 not found: ID does not exist" containerID="57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.948592 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3"} err="failed to get container status \"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3\": rpc error: code = NotFound desc = could not find container \"57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3\": container with ID starting with 57f5c3f622ed0d97b793d351dac2f03637252c5e912156e3038c64c3b8ce69b3 not found: ID does not exist" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.948610 4672 scope.go:117] "RemoveContainer" containerID="82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d" Oct 07 14:52:36 crc kubenswrapper[4672]: E1007 14:52:36.948941 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d\": container with ID starting with 82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d not found: ID does not exist" containerID="82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d" Oct 07 14:52:36 crc kubenswrapper[4672]: I1007 14:52:36.948961 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d"} err="failed to get container status \"82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d\": rpc error: code = NotFound desc = could not find container \"82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d\": container with ID starting with 82ab6dac4d543442ed1d95a3b2a5a5bb8a0ca4ca5c436f43fece68ab12d8396d not found: ID does not exist" Oct 07 14:52:37 crc kubenswrapper[4672]: I1007 14:52:37.898657 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" path="/var/lib/kubelet/pods/0b738796-9990-4a2d-b5ce-e86e79a7da40/volumes" Oct 07 14:52:37 crc kubenswrapper[4672]: I1007 14:52:37.900288 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" path="/var/lib/kubelet/pods/f03b8179-5963-48e0-9f4c-88502776cab7/volumes" Oct 07 14:52:43 crc kubenswrapper[4672]: I1007 14:52:43.630730 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:52:45 crc kubenswrapper[4672]: I1007 14:52:45.880446 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:52:45 crc kubenswrapper[4672]: I1007 14:52:45.881165 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7spfb" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="registry-server" containerID="cri-o://8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377" gracePeriod=2 Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.219061 4672 util.go:48] "No ready sandbox for pod can be found. 
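
The RemoveContainer → NotFound pairs above are a benign race: by the time kubelet retries the deletion, CRI-O has already removed the container, so the runtime answers `rpc error: code = NotFound` and kubelet records `DeleteContainer returned error` without failing the sync. Below is a minimal sketch of that tolerant-deletion pattern, assuming a hypothetical `runtime` interface and a fake implementation; this is not kubelet's actual CRI client code.

```go
// Sketch of the tolerant deletion visible in the log: a NotFound from the
// runtime means the container is already gone, which is the state we wanted,
// so it is not treated as a failure. Hypothetical interface, not kubelet code.
package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

type runtime interface {
	RemoveContainer(ctx context.Context, id string) error
}

type fakeRuntime struct{ gone map[string]bool }

func (f *fakeRuntime) RemoveContainer(_ context.Context, id string) error {
	if f.gone[id] {
		// Mirrors CRI-O's "could not find container ... ID does not exist".
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	f.gone[id] = true
	return nil
}

// removeIfPresent swallows NotFound so repeated deletions are idempotent.
func removeIfPresent(ctx context.Context, rt runtime, id string) error {
	if err := rt.RemoveContainer(ctx, id); err != nil && status.Code(err) != codes.NotFound {
		return fmt.Errorf("remove %q: %w", id, err)
	}
	return nil
}

func main() {
	rt := &fakeRuntime{gone: map[string]bool{}}
	ctx := context.Background()
	fmt.Println(removeIfPresent(ctx, rt, "600352fa4db6")) // <nil>: removed
	fmt.Println(removeIfPresent(ctx, rt, "600352fa4db6")) // <nil>: NotFound race, still success
}
```

Treating NotFound as success keeps the operation idempotent, which is what lets kubelet retry deletions freely after PLEG reports a container gone.
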
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.380192 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkwjf\" (UniqueName: \"kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf\") pod \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.380235 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content\") pod \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.380319 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities\") pod \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\" (UID: \"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c\") " Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.381250 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities" (OuterVolumeSpecName: "utilities") pod "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" (UID: "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.381539 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.386214 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf" (OuterVolumeSpecName: "kube-api-access-qkwjf") pod "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" (UID: "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c"). InnerVolumeSpecName "kube-api-access-qkwjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.393541 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" (UID: "c36536f1-8c7b-44ba-9573-04f1d3f9ed3c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.482740 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.483060 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkwjf\" (UniqueName: \"kubernetes.io/projected/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c-kube-api-access-qkwjf\") on node \"crc\" DevicePath \"\"" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.901286 4672 generic.go:334] "Generic (PLEG): container finished" podID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerID="8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377" exitCode=0 Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.902153 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerDied","Data":"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377"} Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.902219 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7spfb" event={"ID":"c36536f1-8c7b-44ba-9573-04f1d3f9ed3c","Type":"ContainerDied","Data":"675ef1fb149d2ab3748654affcda3b9ebf514b6714a0aff91cacc2183f46f970"} Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.902252 4672 scope.go:117] "RemoveContainer" containerID="8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.902404 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7spfb" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.917531 4672 scope.go:117] "RemoveContainer" containerID="983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.930549 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.935807 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7spfb"] Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.956028 4672 scope.go:117] "RemoveContainer" containerID="1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.971086 4672 scope.go:117] "RemoveContainer" containerID="8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377" Oct 07 14:52:46 crc kubenswrapper[4672]: E1007 14:52:46.971627 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377\": container with ID starting with 8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377 not found: ID does not exist" containerID="8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.971729 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377"} err="failed to get container status \"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377\": rpc error: code = NotFound desc = could not find container \"8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377\": container with ID starting with 8d59da0238c29637630c4c1466400e8a72a00f7fc8f2813b9be02338f8fba377 not found: ID does not exist" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.971835 4672 scope.go:117] "RemoveContainer" containerID="983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7" Oct 07 14:52:46 crc kubenswrapper[4672]: E1007 14:52:46.972489 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7\": container with ID starting with 983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7 not found: ID does not exist" containerID="983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.972518 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7"} err="failed to get container status \"983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7\": rpc error: code = NotFound desc = could not find container \"983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7\": container with ID starting with 983b3c10fc772a72a04947e200ae86407c5843451f5fbdcd5902c4987ec141f7 not found: ID does not exist" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.972535 4672 scope.go:117] "RemoveContainer" containerID="1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a" Oct 07 14:52:46 crc kubenswrapper[4672]: E1007 14:52:46.972918 4672 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a\": container with ID starting with 1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a not found: ID does not exist" containerID="1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a" Oct 07 14:52:46 crc kubenswrapper[4672]: I1007 14:52:46.972973 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a"} err="failed to get container status \"1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a\": rpc error: code = NotFound desc = could not find container \"1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a\": container with ID starting with 1f82e5e7fde12bc14d4aff409e6ad6c3d4795787a12a5f15645b6d57290ec67a not found: ID does not exist" Oct 07 14:52:47 crc kubenswrapper[4672]: I1007 14:52:47.897914 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" path="/var/lib/kubelet/pods/c36536f1-8c7b-44ba-9573-04f1d3f9ed3c/volumes" Oct 07 14:53:35 crc kubenswrapper[4672]: I1007 14:53:35.880913 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:54:00 crc kubenswrapper[4672]: I1007 14:54:00.913803 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerName="oauth-openshift" containerID="cri-o://224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc" gracePeriod=15 Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.758478 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.795966 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-68cb54d767-gfbml"] Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796187 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796202 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796213 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerName="oauth-openshift" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796219 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerName="oauth-openshift" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796229 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb375547-a365-4528-b4a4-98ad3819b58b" containerName="pruner" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796235 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb375547-a365-4528-b4a4-98ad3819b58b" containerName="pruner" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796249 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796256 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796263 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796268 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796274 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796280 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796289 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796295 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796303 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796309 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796317 4672 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796322 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796331 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796337 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796349 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796364 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="extract-content" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796375 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796383 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796393 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796401 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: E1007 14:54:01.796409 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796415 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="extract-utilities" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796536 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerName="oauth-openshift" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796548 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b738796-9990-4a2d-b5ce-e86e79a7da40" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796561 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36536f1-8c7b-44ba-9573-04f1d3f9ed3c" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796572 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf1fc2f5-50a9-4f47-8acc-b2495f3388ac" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796581 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb375547-a365-4528-b4a4-98ad3819b58b" containerName="pruner" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796591 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f03b8179-5963-48e0-9f4c-88502776cab7" containerName="registry-server" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.796936 4672 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.801355 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68cb54d767-gfbml"] Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906718 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906821 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906862 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906897 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfjl2\" (UniqueName: \"kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906935 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906913 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "audit-dir". 
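
The burst of `RemoveStaleState: removing container` / `Deleted CPUSet assignment` entries above happens when a new pod is admitted: per-container resource-manager state left behind by pods that no longer exist is swept out first, so stale assignments cannot leak into the new pod's accounting. A simplified sketch of that sweep, using a toy assignments map rather than the kubelet's real cpu/memory manager state:

```go
// Toy RemoveStaleState sweep: delete state recorded for containers whose
// pods are no longer active. Simplified types; not kubelet's state store.
package main

import "fmt"

type key struct{ podUID, container string }

func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %q\n", k.container, k.podUID)
			delete(assignments, k) // the "Deleted CPUSet assignment" step
		}
	}
}

func main() {
	state := map[key]string{
		{"f03b8179", "registry-server"}: "0-3",
		{"de5685e6", "oauth-openshift"}: "0-3",
	}
	removeStaleState(state, map[string]bool{"de5685e6": true})
	fmt.Println(len(state), "assignment(s) left")
}
```
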
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.906988 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907043 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907081 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907113 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907186 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907221 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907262 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907288 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907339 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data\") pod \"e626da5b-36d9-405a-b44e-fb6c355a51ba\" (UID: \"e626da5b-36d9-405a-b44e-fb6c355a51ba\") " Oct 07 14:54:01 crc 
kubenswrapper[4672]: I1007 14:54:01.907631 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-service-ca\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907676 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907725 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907755 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907778 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-login\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907794 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907799 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/de5685e6-2277-4a27-98be-e87ed2c1a047-kube-api-access-w8s2b\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907879 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-policies\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907877 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.907908 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908125 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908396 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-session\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908412 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908456 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-router-certs\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908547 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-error\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908595 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908694 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.908805 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-dir\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.909558 4672 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.909745 4672 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e626da5b-36d9-405a-b44e-fb6c355a51ba-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.909857 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.909960 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 
14:54:01.910117 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.913392 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.913945 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.914352 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.914712 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.915005 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.915351 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
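
The interleaving above, `UnmountVolume`/`TearDown` for the old pod UID `e626da5b-...` against `VerifyControllerAttachedVolume`/`MountVolume.SetUp` for the new UID `de5685e6-...`, is one reconcile pass working in both directions at once: volumes are keyed by (pod UID, volume name), so the outgoing oauth-openshift pod's volumes can be torn down while the replacement's are set up without colliding. A minimal two-way diff sketch under that keying assumption:

```go
// Two-way volume diff keyed by (pod UID, volume name): desired-but-absent
// volumes are mounted, actual-but-undesired volumes are unmounted.
package main

import "fmt"

type volKey struct{ podUID, name string }

func diff(desired, actual map[volKey]bool) (toMount, toUnmount []volKey) {
	for k := range desired {
		if !actual[k] {
			toMount = append(toMount, k)
		}
	}
	for k := range actual {
		if !desired[k] {
			toUnmount = append(toUnmount, k)
		}
	}
	return
}

func main() {
	actual := map[volKey]bool{{"e626da5b", "audit-dir"}: true}  // old pod
	desired := map[volKey]bool{{"de5685e6", "audit-dir"}: true} // new pod
	m, u := diff(desired, actual)
	fmt.Println("mount:", m, "unmount:", u)
}
```
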
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.915355 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.917431 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:01 crc kubenswrapper[4672]: I1007 14:54:01.918300 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2" (OuterVolumeSpecName: "kube-api-access-jfjl2") pod "e626da5b-36d9-405a-b44e-fb6c355a51ba" (UID: "e626da5b-36d9-405a-b44e-fb6c355a51ba"). InnerVolumeSpecName "kube-api-access-jfjl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011706 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-policies\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011756 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011782 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011814 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-session\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011863 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-error\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011890 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011934 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-dir\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011960 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-service-ca\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.011985 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012054 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012092 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012114 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-login\") 
pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012131 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/de5685e6-2277-4a27-98be-e87ed2c1a047-kube-api-access-w8s2b\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012166 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012177 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012187 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012196 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012205 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfjl2\" (UniqueName: \"kubernetes.io/projected/e626da5b-36d9-405a-b44e-fb6c355a51ba-kube-api-access-jfjl2\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012225 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012236 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012246 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012254 4672 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e626da5b-36d9-405a-b44e-fb6c355a51ba-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012574 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-policies\") pod 
\"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.012901 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de5685e6-2277-4a27-98be-e87ed2c1a047-audit-dir\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.013567 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.013899 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.014061 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-service-ca\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.017623 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.017996 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-router-certs\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.018253 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-session\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.018534 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " 
pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.018696 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.018796 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.019078 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-error\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.019325 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de5685e6-2277-4a27-98be-e87ed2c1a047-v4-0-config-user-template-login\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.028126 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/de5685e6-2277-4a27-98be-e87ed2c1a047-kube-api-access-w8s2b\") pod \"oauth-openshift-68cb54d767-gfbml\" (UID: \"de5685e6-2277-4a27-98be-e87ed2c1a047\") " pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.122695 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.278536 4672 generic.go:334] "Generic (PLEG): container finished" podID="e626da5b-36d9-405a-b44e-fb6c355a51ba" containerID="224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc" exitCode=0 Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.278584 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" event={"ID":"e626da5b-36d9-405a-b44e-fb6c355a51ba","Type":"ContainerDied","Data":"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc"} Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.278610 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" event={"ID":"e626da5b-36d9-405a-b44e-fb6c355a51ba","Type":"ContainerDied","Data":"775fc33a6fab06b48bf277f0de1525a3d1ff88e7f719c4eda1e230db1d44ccd1"} Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.278628 4672 scope.go:117] "RemoveContainer" containerID="224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.278780 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hknhb" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.314759 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.315819 4672 scope.go:117] "RemoveContainer" containerID="224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc" Oct 07 14:54:02 crc kubenswrapper[4672]: E1007 14:54:02.316435 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc\": container with ID starting with 224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc not found: ID does not exist" containerID="224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.316490 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc"} err="failed to get container status \"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc\": rpc error: code = NotFound desc = could not find container \"224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc\": container with ID starting with 224ea4fc60d2f539aae0f6e56646a0cbbac0cafdb1e06b3e10b321d8700200dc not found: ID does not exist" Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.318200 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68cb54d767-gfbml"] Oct 07 14:54:02 crc kubenswrapper[4672]: I1007 14:54:02.321773 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hknhb"] Oct 07 14:54:03 crc kubenswrapper[4672]: I1007 14:54:03.286371 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" event={"ID":"de5685e6-2277-4a27-98be-e87ed2c1a047","Type":"ContainerStarted","Data":"25828d29be332e18e4dfa083e2cbd01063aa7f7efaccede8ab2c9cb04beff772"} Oct 07 14:54:03 crc kubenswrapper[4672]: 
I1007 14:54:03.286695 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:03 crc kubenswrapper[4672]: I1007 14:54:03.286807 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" event={"ID":"de5685e6-2277-4a27-98be-e87ed2c1a047","Type":"ContainerStarted","Data":"96234d4768a4b423524cc8bc8cf5f78ee405fbc636a1b99772f9e0741d9ae4a5"} Oct 07 14:54:03 crc kubenswrapper[4672]: I1007 14:54:03.303680 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" podStartSLOduration=28.303662895 podStartE2EDuration="28.303662895s" podCreationTimestamp="2025-10-07 14:53:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:54:03.302113948 +0000 UTC m=+320.277292539" watchObservedRunningTime="2025-10-07 14:54:03.303662895 +0000 UTC m=+320.278841486" Oct 07 14:54:03 crc kubenswrapper[4672]: I1007 14:54:03.682118 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-68cb54d767-gfbml" Oct 07 14:54:03 crc kubenswrapper[4672]: I1007 14:54:03.899179 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e626da5b-36d9-405a-b44e-fb6c355a51ba" path="/var/lib/kubelet/pods/e626da5b-36d9-405a-b44e-fb6c355a51ba/volumes" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.119641 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.120410 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qgmqj" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="registry-server" containerID="cri-o://2837aa6bbae037780f052ba3b803d3be4645a1491b7c8ee943a74bf8eeb1af88" gracePeriod=30 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.134657 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.135240 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.135473 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" containerID="cri-o://7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f" gracePeriod=30 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.135742 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9rvvc" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="registry-server" containerID="cri-o://3b54722597279a1ca129a842853f32d28761ccdeb9b74f8a951c875786ed9852" gracePeriod=30 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.146255 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.146564 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zjdt6" 
podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="registry-server" containerID="cri-o://e3b0171c4b475385dd42c53cc99595e4dd7b3e673d84f6bf388d843477ba3e37" gracePeriod=30 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.158151 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljjfp"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.158925 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.166211 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.166546 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mrfk8" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="registry-server" containerID="cri-o://d647eea38bb3645a36a9994cbd7cac7a8562b99d3d2fd71c44f7167d3bca025b" gracePeriod=30 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.179545 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljjfp"] Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.258280 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.258363 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjpvx\" (UniqueName: \"kubernetes.io/projected/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-kube-api-access-tjpvx\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.258542 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.359045 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjpvx\" (UniqueName: \"kubernetes.io/projected/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-kube-api-access-tjpvx\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.359127 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc 
kubenswrapper[4672]: I1007 14:54:29.359154 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.360121 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.365648 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.383573 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjpvx\" (UniqueName: \"kubernetes.io/projected/663cdf72-a5c3-4331-90ce-a3b2dfc41c1d-kube-api-access-tjpvx\") pod \"marketplace-operator-79b997595-ljjfp\" (UID: \"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.440910 4672 generic.go:334] "Generic (PLEG): container finished" podID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerID="d647eea38bb3645a36a9994cbd7cac7a8562b99d3d2fd71c44f7167d3bca025b" exitCode=0 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.441030 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerDied","Data":"d647eea38bb3645a36a9994cbd7cac7a8562b99d3d2fd71c44f7167d3bca025b"} Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.443405 4672 generic.go:334] "Generic (PLEG): container finished" podID="ac1d9066-0e72-4070-943d-fc2b01091892" containerID="2837aa6bbae037780f052ba3b803d3be4645a1491b7c8ee943a74bf8eeb1af88" exitCode=0 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.443458 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerDied","Data":"2837aa6bbae037780f052ba3b803d3be4645a1491b7c8ee943a74bf8eeb1af88"} Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.445820 4672 generic.go:334] "Generic (PLEG): container finished" podID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerID="3b54722597279a1ca129a842853f32d28761ccdeb9b74f8a951c875786ed9852" exitCode=0 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.445890 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerDied","Data":"3b54722597279a1ca129a842853f32d28761ccdeb9b74f8a951c875786ed9852"} Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.449689 4672 generic.go:334] "Generic (PLEG): container finished" 
podID="ded2f33e-57fb-4fd2-9477-92de294be838" containerID="e3b0171c4b475385dd42c53cc99595e4dd7b3e673d84f6bf388d843477ba3e37" exitCode=0 Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.449703 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerDied","Data":"e3b0171c4b475385dd42c53cc99595e4dd7b3e673d84f6bf388d843477ba3e37"} Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.490120 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:29 crc kubenswrapper[4672]: I1007 14:54:29.913956 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljjfp"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.131826 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.169205 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7n2qn\" (UniqueName: \"kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn\") pod \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.169267 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca\") pod \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.169349 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics\") pod \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\" (UID: \"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.173552 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" (UID: "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.179192 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" (UID: "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.179565 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn" (OuterVolumeSpecName: "kube-api-access-7n2qn") pod "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" (UID: "f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0"). InnerVolumeSpecName "kube-api-access-7n2qn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.245821 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.271027 4672 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.271058 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7n2qn\" (UniqueName: \"kubernetes.io/projected/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-kube-api-access-7n2qn\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.271066 4672 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.301939 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.310520 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.358714 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.371646 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmxr9\" (UniqueName: \"kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9\") pod \"4d331851-6416-4fa6-965c-f20fa52b7d32\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.371734 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content\") pod \"4d331851-6416-4fa6-965c-f20fa52b7d32\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.371816 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities\") pod \"4d331851-6416-4fa6-965c-f20fa52b7d32\" (UID: \"4d331851-6416-4fa6-965c-f20fa52b7d32\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.372845 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities" (OuterVolumeSpecName: "utilities") pod "4d331851-6416-4fa6-965c-f20fa52b7d32" (UID: "4d331851-6416-4fa6-965c-f20fa52b7d32"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.381330 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9" (OuterVolumeSpecName: "kube-api-access-bmxr9") pod "4d331851-6416-4fa6-965c-f20fa52b7d32" (UID: "4d331851-6416-4fa6-965c-f20fa52b7d32"). InnerVolumeSpecName "kube-api-access-bmxr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.438299 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d331851-6416-4fa6-965c-f20fa52b7d32" (UID: "4d331851-6416-4fa6-965c-f20fa52b7d32"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.454829 4672 generic.go:334] "Generic (PLEG): container finished" podID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerID="7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f" exitCode=0 Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.454880 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" event={"ID":"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0","Type":"ContainerDied","Data":"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.454904 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" event={"ID":"f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0","Type":"ContainerDied","Data":"886fac729d64bedf37a106d9e0539feee1371aecf1d76777e48b56dc3063143c"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.454919 4672 scope.go:117] "RemoveContainer" containerID="7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.455306 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5t2r9" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.461059 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjdt6" event={"ID":"ded2f33e-57fb-4fd2-9477-92de294be838","Type":"ContainerDied","Data":"1abdb43779669cb2ce249134bcfd1d3af14622dd05828b05c8e35ceb278028cf"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.461071 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjdt6" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.465615 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrfk8" event={"ID":"42794156-02f5-474c-8fa3-c13eb4db08b0","Type":"ContainerDied","Data":"31abf446c96323b422291ac488b2e0b3f270386ee6078301df83e2eb529120df"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.465716 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mrfk8" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.468082 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qgmqj" event={"ID":"ac1d9066-0e72-4070-943d-fc2b01091892","Type":"ContainerDied","Data":"58bb6691a73b5cb11a65926b51972cd7d9ba80a4923c690cdc3381df32b64fe0"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.468144 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qgmqj" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472112 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" event={"ID":"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d","Type":"ContainerStarted","Data":"26a7707bc661117fb9d08621fc429e4931d7c62b215d6bc8080412b88e0fc407"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472142 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" event={"ID":"663cdf72-a5c3-4331-90ce-a3b2dfc41c1d","Type":"ContainerStarted","Data":"3686b46a04d973d64b9907c607a73a4a6ecd433f929d6fb8566691ff43abacf7"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472203 4672 scope.go:117] "RemoveContainer" containerID="7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472495 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzr55\" (UniqueName: \"kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55\") pod \"ded2f33e-57fb-4fd2-9477-92de294be838\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472566 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities\") pod \"ded2f33e-57fb-4fd2-9477-92de294be838\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472596 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities\") pod \"42794156-02f5-474c-8fa3-c13eb4db08b0\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472635 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities\") pod \"ac1d9066-0e72-4070-943d-fc2b01091892\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472665 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4r72\" (UniqueName: \"kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72\") pod \"ac1d9066-0e72-4070-943d-fc2b01091892\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472700 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content\") pod \"42794156-02f5-474c-8fa3-c13eb4db08b0\" (UID: 
\"42794156-02f5-474c-8fa3-c13eb4db08b0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472724 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fspz6\" (UniqueName: \"kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6\") pod \"42794156-02f5-474c-8fa3-c13eb4db08b0\" (UID: \"42794156-02f5-474c-8fa3-c13eb4db08b0\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472745 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content\") pod \"ac1d9066-0e72-4070-943d-fc2b01091892\" (UID: \"ac1d9066-0e72-4070-943d-fc2b01091892\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472761 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content\") pod \"ded2f33e-57fb-4fd2-9477-92de294be838\" (UID: \"ded2f33e-57fb-4fd2-9477-92de294be838\") " Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472977 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmxr9\" (UniqueName: \"kubernetes.io/projected/4d331851-6416-4fa6-965c-f20fa52b7d32-kube-api-access-bmxr9\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.472999 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.473015 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d331851-6416-4fa6-965c-f20fa52b7d32-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.473177 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.473323 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities" (OuterVolumeSpecName: "utilities") pod "ded2f33e-57fb-4fd2-9477-92de294be838" (UID: "ded2f33e-57fb-4fd2-9477-92de294be838"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.474679 4672 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-ljjfp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.474715 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" podUID="663cdf72-a5c3-4331-90ce-a3b2dfc41c1d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.474849 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities" (OuterVolumeSpecName: "utilities") pod "42794156-02f5-474c-8fa3-c13eb4db08b0" (UID: "42794156-02f5-474c-8fa3-c13eb4db08b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.475412 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities" (OuterVolumeSpecName: "utilities") pod "ac1d9066-0e72-4070-943d-fc2b01091892" (UID: "ac1d9066-0e72-4070-943d-fc2b01091892"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.477638 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55" (OuterVolumeSpecName: "kube-api-access-nzr55") pod "ded2f33e-57fb-4fd2-9477-92de294be838" (UID: "ded2f33e-57fb-4fd2-9477-92de294be838"). InnerVolumeSpecName "kube-api-access-nzr55". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: E1007 14:54:30.477935 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f\": container with ID starting with 7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f not found: ID does not exist" containerID="7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.477978 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f"} err="failed to get container status \"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f\": rpc error: code = NotFound desc = could not find container \"7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f\": container with ID starting with 7a74f4edc9c408c24d584274516aa91967b7f110ae757f8c0cd21e740b357f4f not found: ID does not exist" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.478010 4672 scope.go:117] "RemoveContainer" containerID="e3b0171c4b475385dd42c53cc99595e4dd7b3e673d84f6bf388d843477ba3e37" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.478897 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72" (OuterVolumeSpecName: "kube-api-access-c4r72") pod "ac1d9066-0e72-4070-943d-fc2b01091892" (UID: "ac1d9066-0e72-4070-943d-fc2b01091892"). InnerVolumeSpecName "kube-api-access-c4r72". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.482773 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.488600 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5t2r9"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.491127 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6" (OuterVolumeSpecName: "kube-api-access-fspz6") pod "42794156-02f5-474c-8fa3-c13eb4db08b0" (UID: "42794156-02f5-474c-8fa3-c13eb4db08b0"). InnerVolumeSpecName "kube-api-access-fspz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.491432 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rvvc" event={"ID":"4d331851-6416-4fa6-965c-f20fa52b7d32","Type":"ContainerDied","Data":"743fb09a0b4afae1efdd4e9c23e08c0821a73ce0283518ceff3348c8aa9bf255"} Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.491530 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9rvvc" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.493064 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ded2f33e-57fb-4fd2-9477-92de294be838" (UID: "ded2f33e-57fb-4fd2-9477-92de294be838"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.500265 4672 scope.go:117] "RemoveContainer" containerID="9932851aea50dd4bca2db7a88463b05445a97b0f628cc1c0db92b970b212ca76" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.505567 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" podStartSLOduration=1.505545781 podStartE2EDuration="1.505545781s" podCreationTimestamp="2025-10-07 14:54:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:54:30.503705216 +0000 UTC m=+347.478883817" watchObservedRunningTime="2025-10-07 14:54:30.505545781 +0000 UTC m=+347.480724372" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.520998 4672 scope.go:117] "RemoveContainer" containerID="107ea54990c87268f0c5d59eb31d54f083367cfd303131f700aca738831dafda" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.533844 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.543831 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9rvvc"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.550560 4672 scope.go:117] "RemoveContainer" containerID="d647eea38bb3645a36a9994cbd7cac7a8562b99d3d2fd71c44f7167d3bca025b" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.563365 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac1d9066-0e72-4070-943d-fc2b01091892" (UID: "ac1d9066-0e72-4070-943d-fc2b01091892"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.572339 4672 scope.go:117] "RemoveContainer" containerID="55872ed8e8eed01caf5f4114fb28e29a4ffcb7938f537830c746845e6520da6f" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576470 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzr55\" (UniqueName: \"kubernetes.io/projected/ded2f33e-57fb-4fd2-9477-92de294be838-kube-api-access-nzr55\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576523 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576542 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576555 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576568 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4r72\" (UniqueName: \"kubernetes.io/projected/ac1d9066-0e72-4070-943d-fc2b01091892-kube-api-access-c4r72\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576582 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fspz6\" (UniqueName: \"kubernetes.io/projected/42794156-02f5-474c-8fa3-c13eb4db08b0-kube-api-access-fspz6\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576595 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac1d9066-0e72-4070-943d-fc2b01091892-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.576609 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ded2f33e-57fb-4fd2-9477-92de294be838-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.585846 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42794156-02f5-474c-8fa3-c13eb4db08b0" (UID: "42794156-02f5-474c-8fa3-c13eb4db08b0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.591929 4672 scope.go:117] "RemoveContainer" containerID="cdd98578ab6e6fb0fd24a62242efd59927fd4a3d5ecb9987538fcf070be74942" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.607664 4672 scope.go:117] "RemoveContainer" containerID="2837aa6bbae037780f052ba3b803d3be4645a1491b7c8ee943a74bf8eeb1af88" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.623722 4672 scope.go:117] "RemoveContainer" containerID="f4d892f0b5dca8d10fc14ef634c59c0228120defddb9b8eda01818580d9aef05" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.642133 4672 scope.go:117] "RemoveContainer" containerID="8073ad86e626bd111269c0f5f2ef3a651ce4909f87aa16453416615b0e284634" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.668410 4672 scope.go:117] "RemoveContainer" containerID="3b54722597279a1ca129a842853f32d28761ccdeb9b74f8a951c875786ed9852" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.678085 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42794156-02f5-474c-8fa3-c13eb4db08b0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.687702 4672 scope.go:117] "RemoveContainer" containerID="a81644f5f9e3368852f38eb30ba5c2df58e882a3742898baef9c50b28e9f2222" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.710010 4672 scope.go:117] "RemoveContainer" containerID="5d37a364466d54098d9a9adb36cbe7ddbcf911bd429010ecab899878f09a766a" Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.793516 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.797747 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjdt6"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.808216 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.815003 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mrfk8"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.823007 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:54:30 crc kubenswrapper[4672]: I1007 14:54:30.829456 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qgmqj"] Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.331974 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kwgqv"] Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332244 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332262 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332275 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332283 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" 
containerName="marketplace-operator" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332299 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332307 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332315 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332324 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332347 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332354 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332368 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332375 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332385 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332402 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332410 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332416 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332426 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332432 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332439 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332445 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332451 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332456 4672 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="extract-content" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332466 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332471 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="extract-utilities" Oct 07 14:54:31 crc kubenswrapper[4672]: E1007 14:54:31.332481 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332486 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332572 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332586 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332593 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332603 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" containerName="registry-server" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.332610 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" containerName="marketplace-operator" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.333598 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.336391 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.349430 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwgqv"] Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.386322 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzmdb\" (UniqueName: \"kubernetes.io/projected/20da9666-7ff2-4127-afc2-a5e1b1ca402e-kube-api-access-jzmdb\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.386394 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-catalog-content\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.386428 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-utilities\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.487404 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzmdb\" (UniqueName: \"kubernetes.io/projected/20da9666-7ff2-4127-afc2-a5e1b1ca402e-kube-api-access-jzmdb\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.487485 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-catalog-content\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.487515 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-utilities\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.487952 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-utilities\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.488109 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20da9666-7ff2-4127-afc2-a5e1b1ca402e-catalog-content\") pod \"redhat-marketplace-kwgqv\" (UID: 
\"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.505991 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ljjfp" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.506342 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzmdb\" (UniqueName: \"kubernetes.io/projected/20da9666-7ff2-4127-afc2-a5e1b1ca402e-kube-api-access-jzmdb\") pod \"redhat-marketplace-kwgqv\" (UID: \"20da9666-7ff2-4127-afc2-a5e1b1ca402e\") " pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.550804 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lrzlc"] Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.553948 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.557881 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lrzlc"] Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.559552 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.649625 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.689903 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk65c\" (UniqueName: \"kubernetes.io/projected/25bc32fc-0334-400e-903d-0a107454324e-kube-api-access-nk65c\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.690543 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-utilities\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.690675 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-catalog-content\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.791569 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk65c\" (UniqueName: \"kubernetes.io/projected/25bc32fc-0334-400e-903d-0a107454324e-kube-api-access-nk65c\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.791620 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-utilities\") pod 
\"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.791648 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-catalog-content\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.792157 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-catalog-content\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.792416 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25bc32fc-0334-400e-903d-0a107454324e-utilities\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.815687 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk65c\" (UniqueName: \"kubernetes.io/projected/25bc32fc-0334-400e-903d-0a107454324e-kube-api-access-nk65c\") pod \"certified-operators-lrzlc\" (UID: \"25bc32fc-0334-400e-903d-0a107454324e\") " pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.879807 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.899473 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42794156-02f5-474c-8fa3-c13eb4db08b0" path="/var/lib/kubelet/pods/42794156-02f5-474c-8fa3-c13eb4db08b0/volumes" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.900263 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d331851-6416-4fa6-965c-f20fa52b7d32" path="/var/lib/kubelet/pods/4d331851-6416-4fa6-965c-f20fa52b7d32/volumes" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.900900 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac1d9066-0e72-4070-943d-fc2b01091892" path="/var/lib/kubelet/pods/ac1d9066-0e72-4070-943d-fc2b01091892/volumes" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.901935 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ded2f33e-57fb-4fd2-9477-92de294be838" path="/var/lib/kubelet/pods/ded2f33e-57fb-4fd2-9477-92de294be838/volumes" Oct 07 14:54:31 crc kubenswrapper[4672]: I1007 14:54:31.902556 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0" path="/var/lib/kubelet/pods/f5715dd2-f3be-4f8d-a4a3-c7d0ce56abc0/volumes" Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.033553 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwgqv"] Oct 07 14:54:32 crc kubenswrapper[4672]: W1007 14:54:32.040249 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20da9666_7ff2_4127_afc2_a5e1b1ca402e.slice/crio-ae23b289356d6ed91ae819213cbc42b1fcd4e31ea2101ecece56ec7f17e62d0f WatchSource:0}: Error finding container ae23b289356d6ed91ae819213cbc42b1fcd4e31ea2101ecece56ec7f17e62d0f: Status 404 returned error can't find the container with id ae23b289356d6ed91ae819213cbc42b1fcd4e31ea2101ecece56ec7f17e62d0f Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.101707 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lrzlc"] Oct 07 14:54:32 crc kubenswrapper[4672]: W1007 14:54:32.110966 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25bc32fc_0334_400e_903d_0a107454324e.slice/crio-5ae4a67e209b71400a674d530edc5232228751f1274a57928fe9df4529ea56bc WatchSource:0}: Error finding container 5ae4a67e209b71400a674d530edc5232228751f1274a57928fe9df4529ea56bc: Status 404 returned error can't find the container with id 5ae4a67e209b71400a674d530edc5232228751f1274a57928fe9df4529ea56bc Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.509856 4672 generic.go:334] "Generic (PLEG): container finished" podID="20da9666-7ff2-4127-afc2-a5e1b1ca402e" containerID="56b948255c4e88b9d4e526945d6cedc651c895c00dcb13587122ecd757b9a860" exitCode=0 Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.509905 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwgqv" event={"ID":"20da9666-7ff2-4127-afc2-a5e1b1ca402e","Type":"ContainerDied","Data":"56b948255c4e88b9d4e526945d6cedc651c895c00dcb13587122ecd757b9a860"} Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.509981 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwgqv" 
event={"ID":"20da9666-7ff2-4127-afc2-a5e1b1ca402e","Type":"ContainerStarted","Data":"ae23b289356d6ed91ae819213cbc42b1fcd4e31ea2101ecece56ec7f17e62d0f"} Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.514155 4672 generic.go:334] "Generic (PLEG): container finished" podID="25bc32fc-0334-400e-903d-0a107454324e" containerID="0e1a5d66090689588c85118613561333306e848c2e3e629a7f03f5e2f8c2c48c" exitCode=0 Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.514208 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrzlc" event={"ID":"25bc32fc-0334-400e-903d-0a107454324e","Type":"ContainerDied","Data":"0e1a5d66090689588c85118613561333306e848c2e3e629a7f03f5e2f8c2c48c"} Oct 07 14:54:32 crc kubenswrapper[4672]: I1007 14:54:32.514255 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrzlc" event={"ID":"25bc32fc-0334-400e-903d-0a107454324e","Type":"ContainerStarted","Data":"5ae4a67e209b71400a674d530edc5232228751f1274a57928fe9df4529ea56bc"} Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.521718 4672 generic.go:334] "Generic (PLEG): container finished" podID="20da9666-7ff2-4127-afc2-a5e1b1ca402e" containerID="07c36236a5a3c34baf8f8dc9be51da28f3ad4330590c03bf9119347d2a1dc14b" exitCode=0 Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.521801 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwgqv" event={"ID":"20da9666-7ff2-4127-afc2-a5e1b1ca402e","Type":"ContainerDied","Data":"07c36236a5a3c34baf8f8dc9be51da28f3ad4330590c03bf9119347d2a1dc14b"} Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.731926 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.733005 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.734430 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.745384 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.924905 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.924964 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.924990 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2j4p\" (UniqueName: \"kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.933187 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.936467 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.939030 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 14:54:33 crc kubenswrapper[4672]: I1007 14:54:33.940124 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.025529 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.025579 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2j4p\" (UniqueName: \"kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.025814 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.026110 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.026259 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.045151 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2j4p\" (UniqueName: \"kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p\") pod \"redhat-operators-kr5mt\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.046777 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.126400 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.126688 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw9pr\" (UniqueName: \"kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.126753 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.218699 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 14:54:34 crc kubenswrapper[4672]: W1007 14:54:34.224859 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b1bc29c_f45a_4070_8fbb_e58c56ce448d.slice/crio-557369d490f49814c01135a60ae0761db819fbc4b6f14b7a0af2e63c87a02dc6 WatchSource:0}: Error finding container 557369d490f49814c01135a60ae0761db819fbc4b6f14b7a0af2e63c87a02dc6: Status 404 returned error can't find the container with id 557369d490f49814c01135a60ae0761db819fbc4b6f14b7a0af2e63c87a02dc6 Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.227773 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.227816 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw9pr\" (UniqueName: \"kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.227843 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.228214 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " 
pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.228325 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.249978 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw9pr\" (UniqueName: \"kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr\") pod \"community-operators-9pg96\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.254363 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.497780 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.534711 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerStarted","Data":"dcdb913afada179889244037da43249b16fa7eff56f17c156bf028c2839a3a6e"} Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.537391 4672 generic.go:334] "Generic (PLEG): container finished" podID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerID="2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a" exitCode=0 Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.537500 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerDied","Data":"2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a"} Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.537547 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerStarted","Data":"557369d490f49814c01135a60ae0761db819fbc4b6f14b7a0af2e63c87a02dc6"} Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.539451 4672 generic.go:334] "Generic (PLEG): container finished" podID="25bc32fc-0334-400e-903d-0a107454324e" containerID="f1e5d05b41f5ee03d3842e7c8cf26d18c199c9e584a11de1087dceb4c743d6ca" exitCode=0 Oct 07 14:54:34 crc kubenswrapper[4672]: I1007 14:54:34.539493 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrzlc" event={"ID":"25bc32fc-0334-400e-903d-0a107454324e","Type":"ContainerDied","Data":"f1e5d05b41f5ee03d3842e7c8cf26d18c199c9e584a11de1087dceb4c743d6ca"} Oct 07 14:54:35 crc kubenswrapper[4672]: I1007 14:54:35.546186 4672 generic.go:334] "Generic (PLEG): container finished" podID="ae974611-9a8a-42b1-8406-5f532debaab1" containerID="fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c" exitCode=0 Oct 07 14:54:35 crc kubenswrapper[4672]: I1007 14:54:35.546290 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" 
event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerDied","Data":"fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c"} Oct 07 14:54:35 crc kubenswrapper[4672]: I1007 14:54:35.548844 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwgqv" event={"ID":"20da9666-7ff2-4127-afc2-a5e1b1ca402e","Type":"ContainerStarted","Data":"f3aa46db5942a14bd9c567dc0e2773d16c1ea42a245c75343892149be7766c85"} Oct 07 14:54:35 crc kubenswrapper[4672]: I1007 14:54:35.582521 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kwgqv" podStartSLOduration=2.570247025 podStartE2EDuration="4.58250047s" podCreationTimestamp="2025-10-07 14:54:31 +0000 UTC" firstStartedPulling="2025-10-07 14:54:32.511699496 +0000 UTC m=+349.486878077" lastFinishedPulling="2025-10-07 14:54:34.523952941 +0000 UTC m=+351.499131522" observedRunningTime="2025-10-07 14:54:35.582078258 +0000 UTC m=+352.557256849" watchObservedRunningTime="2025-10-07 14:54:35.58250047 +0000 UTC m=+352.557679061" Oct 07 14:54:36 crc kubenswrapper[4672]: I1007 14:54:36.556957 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrzlc" event={"ID":"25bc32fc-0334-400e-903d-0a107454324e","Type":"ContainerStarted","Data":"afeb27174aca9fa67707c5f0fd8161bcac74d8823789f643a729eeb5474b6f1a"} Oct 07 14:54:36 crc kubenswrapper[4672]: I1007 14:54:36.559609 4672 generic.go:334] "Generic (PLEG): container finished" podID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerID="dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b" exitCode=0 Oct 07 14:54:36 crc kubenswrapper[4672]: I1007 14:54:36.560582 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerDied","Data":"dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b"} Oct 07 14:54:36 crc kubenswrapper[4672]: I1007 14:54:36.581062 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lrzlc" podStartSLOduration=2.641085843 podStartE2EDuration="5.581009073s" podCreationTimestamp="2025-10-07 14:54:31 +0000 UTC" firstStartedPulling="2025-10-07 14:54:32.517705664 +0000 UTC m=+349.492884245" lastFinishedPulling="2025-10-07 14:54:35.457628894 +0000 UTC m=+352.432807475" observedRunningTime="2025-10-07 14:54:36.580675553 +0000 UTC m=+353.555854134" watchObservedRunningTime="2025-10-07 14:54:36.581009073 +0000 UTC m=+353.556187654" Oct 07 14:54:38 crc kubenswrapper[4672]: I1007 14:54:38.575252 4672 generic.go:334] "Generic (PLEG): container finished" podID="ae974611-9a8a-42b1-8406-5f532debaab1" containerID="b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba" exitCode=0 Oct 07 14:54:38 crc kubenswrapper[4672]: I1007 14:54:38.575353 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerDied","Data":"b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba"} Oct 07 14:54:38 crc kubenswrapper[4672]: I1007 14:54:38.580226 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerStarted","Data":"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4"} Oct 07 14:54:38 crc 
kubenswrapper[4672]: I1007 14:54:38.611116 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kr5mt" podStartSLOduration=2.917989669 podStartE2EDuration="5.611095185s" podCreationTimestamp="2025-10-07 14:54:33 +0000 UTC" firstStartedPulling="2025-10-07 14:54:34.548821547 +0000 UTC m=+351.524000128" lastFinishedPulling="2025-10-07 14:54:37.241927063 +0000 UTC m=+354.217105644" observedRunningTime="2025-10-07 14:54:38.609751536 +0000 UTC m=+355.584930127" watchObservedRunningTime="2025-10-07 14:54:38.611095185 +0000 UTC m=+355.586273766" Oct 07 14:54:40 crc kubenswrapper[4672]: I1007 14:54:40.591874 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerStarted","Data":"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75"} Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.650441 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.651837 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.699820 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.714283 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9pg96" podStartSLOduration=4.872836296 podStartE2EDuration="8.714266548s" podCreationTimestamp="2025-10-07 14:54:33 +0000 UTC" firstStartedPulling="2025-10-07 14:54:35.60344558 +0000 UTC m=+352.578624161" lastFinishedPulling="2025-10-07 14:54:39.444875822 +0000 UTC m=+356.420054413" observedRunningTime="2025-10-07 14:54:40.614306653 +0000 UTC m=+357.589485234" watchObservedRunningTime="2025-10-07 14:54:41.714266548 +0000 UTC m=+358.689445129" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.880810 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.880878 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:41 crc kubenswrapper[4672]: I1007 14:54:41.916558 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:42 crc kubenswrapper[4672]: I1007 14:54:42.641599 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kwgqv" Oct 07 14:54:42 crc kubenswrapper[4672]: I1007 14:54:42.647197 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lrzlc" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.047265 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.047554 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.082565 4672 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.255002 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.255071 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.288060 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.652776 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 14:54:44 crc kubenswrapper[4672]: I1007 14:54:44.653002 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9pg96" Oct 07 14:54:56 crc kubenswrapper[4672]: I1007 14:54:56.651046 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 14:54:56 crc kubenswrapper[4672]: I1007 14:54:56.651602 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 14:55:26 crc kubenswrapper[4672]: I1007 14:55:26.651159 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 14:55:26 crc kubenswrapper[4672]: I1007 14:55:26.651988 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 14:55:56 crc kubenswrapper[4672]: I1007 14:55:56.650778 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 14:55:56 crc kubenswrapper[4672]: I1007 14:55:56.651242 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 14:55:56 crc kubenswrapper[4672]: I1007 14:55:56.651325 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 14:55:56 crc kubenswrapper[4672]: I1007 14:55:56.651945 
4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 14:55:56 crc kubenswrapper[4672]: I1007 14:55:56.652035 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f" gracePeriod=600 Oct 07 14:55:57 crc kubenswrapper[4672]: I1007 14:55:57.006393 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f" exitCode=0 Oct 07 14:55:57 crc kubenswrapper[4672]: I1007 14:55:57.006460 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f"} Oct 07 14:55:57 crc kubenswrapper[4672]: I1007 14:55:57.006499 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e"} Oct 07 14:55:57 crc kubenswrapper[4672]: I1007 14:55:57.006520 4672 scope.go:117] "RemoveContainer" containerID="f6dd5535c9d9358deb42b31d0a9d9ef70f26f44061c3abe7ee94d014e828827d" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.686743 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m8vrw"] Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.687987 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.706795 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m8vrw"] Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.801807 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-tls\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.801885 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-certificates\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.801944 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-trusted-ca\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.802087 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59fb5d41-1255-4f4d-9076-679f5eafda44-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.802239 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.802299 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-bound-sa-token\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.802336 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59fb5d41-1255-4f4d-9076-679f5eafda44-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.802384 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct42w\" (UniqueName: 
\"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-kube-api-access-ct42w\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.825155 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903136 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-certificates\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903196 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-trusted-ca\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903230 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59fb5d41-1255-4f4d-9076-679f5eafda44-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903283 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-bound-sa-token\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903309 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59fb5d41-1255-4f4d-9076-679f5eafda44-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903334 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct42w\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-kube-api-access-ct42w\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.903359 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-tls\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.904091 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59fb5d41-1255-4f4d-9076-679f5eafda44-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.904826 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-certificates\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.904981 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59fb5d41-1255-4f4d-9076-679f5eafda44-trusted-ca\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.910336 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59fb5d41-1255-4f4d-9076-679f5eafda44-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.913626 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-registry-tls\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.920339 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct42w\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-kube-api-access-ct42w\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:37 crc kubenswrapper[4672]: I1007 14:56:37.923829 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59fb5d41-1255-4f4d-9076-679f5eafda44-bound-sa-token\") pod \"image-registry-66df7c8f76-m8vrw\" (UID: \"59fb5d41-1255-4f4d-9076-679f5eafda44\") " pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:38 crc kubenswrapper[4672]: I1007 14:56:38.007129 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:38 crc kubenswrapper[4672]: I1007 14:56:38.176326 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m8vrw"] Oct 07 14:56:38 crc kubenswrapper[4672]: I1007 14:56:38.233931 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" event={"ID":"59fb5d41-1255-4f4d-9076-679f5eafda44","Type":"ContainerStarted","Data":"9901612b40da2ac6a4c9daa286f4f2afd4a3db6c5560d282f616956d60848617"} Oct 07 14:56:39 crc kubenswrapper[4672]: I1007 14:56:39.240873 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" event={"ID":"59fb5d41-1255-4f4d-9076-679f5eafda44","Type":"ContainerStarted","Data":"1fb4ddeca1280159f3fee59dc2c2d87fc55356289e9ebac04d00ed9f5c397406"} Oct 07 14:56:39 crc kubenswrapper[4672]: I1007 14:56:39.242204 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:39 crc kubenswrapper[4672]: I1007 14:56:39.260455 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" podStartSLOduration=2.2604275 podStartE2EDuration="2.2604275s" podCreationTimestamp="2025-10-07 14:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 14:56:39.260201724 +0000 UTC m=+476.235380315" watchObservedRunningTime="2025-10-07 14:56:39.2604275 +0000 UTC m=+476.235606081" Oct 07 14:56:58 crc kubenswrapper[4672]: I1007 14:56:58.013165 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-m8vrw" Oct 07 14:56:58 crc kubenswrapper[4672]: I1007 14:56:58.069227 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"] Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.104666 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" podUID="eed23ae9-82ee-4db9-b353-70033e17e039" containerName="registry" containerID="cri-o://a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833" gracePeriod=30 Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.428527 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.464012 4672 generic.go:334] "Generic (PLEG): container finished" podID="eed23ae9-82ee-4db9-b353-70033e17e039" containerID="a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833" exitCode=0 Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.464077 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.464094 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" event={"ID":"eed23ae9-82ee-4db9-b353-70033e17e039","Type":"ContainerDied","Data":"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833"} Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.464208 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sqktl" event={"ID":"eed23ae9-82ee-4db9-b353-70033e17e039","Type":"ContainerDied","Data":"ab8078d6ba50465d355e1bbb320b329dbe7103303c2a8eedb0c4194fdc0c0421"} Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.464226 4672 scope.go:117] "RemoveContainer" containerID="a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.487117 4672 scope.go:117] "RemoveContainer" containerID="a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833" Oct 07 14:57:23 crc kubenswrapper[4672]: E1007 14:57:23.487618 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833\": container with ID starting with a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833 not found: ID does not exist" containerID="a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.487651 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833"} err="failed to get container status \"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833\": rpc error: code = NotFound desc = could not find container \"a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833\": container with ID starting with a233a3bade9f1cb0c1544ccedbf9652c41ab9e5c10c3782a8b629df6ebed8833 not found: ID does not exist" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617241 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617329 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617405 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2qz7\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617461 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates\") pod 
\"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617492 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617542 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617630 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.617996 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"eed23ae9-82ee-4db9-b353-70033e17e039\" (UID: \"eed23ae9-82ee-4db9-b353-70033e17e039\") " Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.618593 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.619302 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.624326 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7" (OuterVolumeSpecName: "kube-api-access-d2qz7") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "kube-api-access-d2qz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.625119 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.625322 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.626268 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.630241 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.637656 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "eed23ae9-82ee-4db9-b353-70033e17e039" (UID: "eed23ae9-82ee-4db9-b353-70033e17e039"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720230 4672 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eed23ae9-82ee-4db9-b353-70033e17e039-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720315 4672 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720328 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2qz7\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-kube-api-access-d2qz7\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720345 4672 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720363 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eed23ae9-82ee-4db9-b353-70033e17e039-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720375 4672 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eed23ae9-82ee-4db9-b353-70033e17e039-registry-tls\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.720387 4672 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eed23ae9-82ee-4db9-b353-70033e17e039-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.794987 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"]
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.799360 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sqktl"]
Oct 07 14:57:23 crc kubenswrapper[4672]: I1007 14:57:23.901081 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eed23ae9-82ee-4db9-b353-70033e17e039" path="/var/lib/kubelet/pods/eed23ae9-82ee-4db9-b353-70033e17e039/volumes"
Oct 07 14:57:56 crc kubenswrapper[4672]: I1007 14:57:56.650808 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 14:57:56 crc kubenswrapper[4672]: I1007 14:57:56.652204 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 14:58:26 crc kubenswrapper[4672]: I1007 14:58:26.650871 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 14:58:26 crc kubenswrapper[4672]: I1007 14:58:26.651417 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.650332 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.650843 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.650891 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj"
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.651383 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.651434 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e" gracePeriod=600
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.904264 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e" exitCode=0
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.904313 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e"}
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.904344 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb"}
Oct 07 14:58:56 crc kubenswrapper[4672]: I1007 14:58:56.904363 4672 scope.go:117] "RemoveContainer" containerID="f6fbe1825093d14900923be095641b75721f8bdd2190a6bd05844e8208070c4f"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.148409 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"]
Oct 07 15:00:00 crc kubenswrapper[4672]: E1007 15:00:00.150124 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed23ae9-82ee-4db9-b353-70033e17e039" containerName="registry"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.150222 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed23ae9-82ee-4db9-b353-70033e17e039" containerName="registry"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.150443 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="eed23ae9-82ee-4db9-b353-70033e17e039" containerName="registry"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.150890 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"]
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.151073 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.185283 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.186761 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.232260 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.232526 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.232611 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmzd8\" (UniqueName: \"kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.333822 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.333914 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.333938 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmzd8\" (UniqueName: \"kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.335128 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.339216 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.349866 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmzd8\" (UniqueName: \"kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8\") pod \"collect-profiles-29330820-bwmjx\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.509294 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:00 crc kubenswrapper[4672]: I1007 15:00:00.694241 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"]
Oct 07 15:00:01 crc kubenswrapper[4672]: I1007 15:00:01.226465 4672 generic.go:334] "Generic (PLEG): container finished" podID="9dac1f5d-9a0e-469d-9072-c200b51d991a" containerID="90bbad566c354bf22328a2f475a13d16cdfb54033dfc306b3f388276a703c378" exitCode=0
Oct 07 15:00:01 crc kubenswrapper[4672]: I1007 15:00:01.226650 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx" event={"ID":"9dac1f5d-9a0e-469d-9072-c200b51d991a","Type":"ContainerDied","Data":"90bbad566c354bf22328a2f475a13d16cdfb54033dfc306b3f388276a703c378"}
Oct 07 15:00:01 crc kubenswrapper[4672]: I1007 15:00:01.226806 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx" event={"ID":"9dac1f5d-9a0e-469d-9072-c200b51d991a","Type":"ContainerStarted","Data":"47700cc30faf844c066832b1ca2a08522a85e0938dcf119a3f3f73edc1d87314"}
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.430725 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.563562 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume\") pod \"9dac1f5d-9a0e-469d-9072-c200b51d991a\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") "
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.563631 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmzd8\" (UniqueName: \"kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8\") pod \"9dac1f5d-9a0e-469d-9072-c200b51d991a\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") "
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.563676 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume\") pod \"9dac1f5d-9a0e-469d-9072-c200b51d991a\" (UID: \"9dac1f5d-9a0e-469d-9072-c200b51d991a\") "
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.565129 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume" (OuterVolumeSpecName: "config-volume") pod "9dac1f5d-9a0e-469d-9072-c200b51d991a" (UID: "9dac1f5d-9a0e-469d-9072-c200b51d991a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.568685 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9dac1f5d-9a0e-469d-9072-c200b51d991a" (UID: "9dac1f5d-9a0e-469d-9072-c200b51d991a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.569009 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8" (OuterVolumeSpecName: "kube-api-access-qmzd8") pod "9dac1f5d-9a0e-469d-9072-c200b51d991a" (UID: "9dac1f5d-9a0e-469d-9072-c200b51d991a"). InnerVolumeSpecName "kube-api-access-qmzd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.664889 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9dac1f5d-9a0e-469d-9072-c200b51d991a-config-volume\") on node \"crc\" DevicePath \"\""
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.664938 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmzd8\" (UniqueName: \"kubernetes.io/projected/9dac1f5d-9a0e-469d-9072-c200b51d991a-kube-api-access-qmzd8\") on node \"crc\" DevicePath \"\""
Oct 07 15:00:02 crc kubenswrapper[4672]: I1007 15:00:02.664953 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9dac1f5d-9a0e-469d-9072-c200b51d991a-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 07 15:00:03 crc kubenswrapper[4672]: I1007 15:00:03.237910 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx" event={"ID":"9dac1f5d-9a0e-469d-9072-c200b51d991a","Type":"ContainerDied","Data":"47700cc30faf844c066832b1ca2a08522a85e0938dcf119a3f3f73edc1d87314"}
Oct 07 15:00:03 crc kubenswrapper[4672]: I1007 15:00:03.237963 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47700cc30faf844c066832b1ca2a08522a85e0938dcf119a3f3f73edc1d87314"
Oct 07 15:00:03 crc kubenswrapper[4672]: I1007 15:00:03.237988 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"
Oct 07 15:00:56 crc kubenswrapper[4672]: I1007 15:00:56.650160 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 15:00:56 crc kubenswrapper[4672]: I1007 15:00:56.650582 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.283797 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"]
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.284597 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager" containerID="cri-o://0a9323c40879beb5fd465833a7653849c7e4e986dfac7ef28294d0d83e93ea84" gracePeriod=30
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.384435 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"]
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.384700 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" podUID="1f715b04-a647-46de-8588-bff20df5820f" containerName="route-controller-manager" containerID="cri-o://452b3bbe9b67bc4733f105dea725f73ca3c05aadb302112f62b7d45eda976ec5" gracePeriod=30
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.553312 4672 generic.go:334] "Generic (PLEG): container finished" podID="1f715b04-a647-46de-8588-bff20df5820f" containerID="452b3bbe9b67bc4733f105dea725f73ca3c05aadb302112f62b7d45eda976ec5" exitCode=0
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.553598 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" event={"ID":"1f715b04-a647-46de-8588-bff20df5820f","Type":"ContainerDied","Data":"452b3bbe9b67bc4733f105dea725f73ca3c05aadb302112f62b7d45eda976ec5"}
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.554570 4672 generic.go:334] "Generic (PLEG): container finished" podID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerID="0a9323c40879beb5fd465833a7653849c7e4e986dfac7ef28294d0d83e93ea84" exitCode=0
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.554615 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" event={"ID":"46244c53-c30f-4b0d-9e2f-873bc7c7c660","Type":"ContainerDied","Data":"0a9323c40879beb5fd465833a7653849c7e4e986dfac7ef28294d0d83e93ea84"}
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.633148 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29"
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.712215 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.793576 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles\") pod \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.793620 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config\") pod \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.793675 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n8dg\" (UniqueName: \"kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg\") pod \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.793720 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert\") pod \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.793762 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca\") pod \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\" (UID: \"46244c53-c30f-4b0d-9e2f-873bc7c7c660\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.794443 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "46244c53-c30f-4b0d-9e2f-873bc7c7c660" (UID: "46244c53-c30f-4b0d-9e2f-873bc7c7c660"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.794453 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca" (OuterVolumeSpecName: "client-ca") pod "46244c53-c30f-4b0d-9e2f-873bc7c7c660" (UID: "46244c53-c30f-4b0d-9e2f-873bc7c7c660"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.794575 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config" (OuterVolumeSpecName: "config") pod "46244c53-c30f-4b0d-9e2f-873bc7c7c660" (UID: "46244c53-c30f-4b0d-9e2f-873bc7c7c660"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.801035 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "46244c53-c30f-4b0d-9e2f-873bc7c7c660" (UID: "46244c53-c30f-4b0d-9e2f-873bc7c7c660"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.801115 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg" (OuterVolumeSpecName: "kube-api-access-8n8dg") pod "46244c53-c30f-4b0d-9e2f-873bc7c7c660" (UID: "46244c53-c30f-4b0d-9e2f-873bc7c7c660"). InnerVolumeSpecName "kube-api-access-8n8dg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894335 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config\") pod \"1f715b04-a647-46de-8588-bff20df5820f\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894407 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca\") pod \"1f715b04-a647-46de-8588-bff20df5820f\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894434 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbhlw\" (UniqueName: \"kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw\") pod \"1f715b04-a647-46de-8588-bff20df5820f\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894457 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert\") pod \"1f715b04-a647-46de-8588-bff20df5820f\" (UID: \"1f715b04-a647-46de-8588-bff20df5820f\") "
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894687 4672 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894704 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-config\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894716 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n8dg\" (UniqueName: \"kubernetes.io/projected/46244c53-c30f-4b0d-9e2f-873bc7c7c660-kube-api-access-8n8dg\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894728 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46244c53-c30f-4b0d-9e2f-873bc7c7c660-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894739 4672 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46244c53-c30f-4b0d-9e2f-873bc7c7c660-client-ca\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.894969 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config" (OuterVolumeSpecName: "config") pod "1f715b04-a647-46de-8588-bff20df5820f" (UID: "1f715b04-a647-46de-8588-bff20df5820f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.895113 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca" (OuterVolumeSpecName: "client-ca") pod "1f715b04-a647-46de-8588-bff20df5820f" (UID: "1f715b04-a647-46de-8588-bff20df5820f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.898936 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw" (OuterVolumeSpecName: "kube-api-access-qbhlw") pod "1f715b04-a647-46de-8588-bff20df5820f" (UID: "1f715b04-a647-46de-8588-bff20df5820f"). InnerVolumeSpecName "kube-api-access-qbhlw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.900091 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1f715b04-a647-46de-8588-bff20df5820f" (UID: "1f715b04-a647-46de-8588-bff20df5820f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.995999 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-config\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.996062 4672 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f715b04-a647-46de-8588-bff20df5820f-client-ca\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.996075 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbhlw\" (UniqueName: \"kubernetes.io/projected/1f715b04-a647-46de-8588-bff20df5820f-kube-api-access-qbhlw\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:08 crc kubenswrapper[4672]: I1007 15:01:08.996086 4672 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f715b04-a647-46de-8588-bff20df5820f-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.562179 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.562390 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rsb29" event={"ID":"46244c53-c30f-4b0d-9e2f-873bc7c7c660","Type":"ContainerDied","Data":"9caaec24ac1392d6dccc7e54dd1df1c67a4b58352669e71a16945062cdc912c4"}
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.562680 4672 scope.go:117] "RemoveContainer" containerID="0a9323c40879beb5fd465833a7653849c7e4e986dfac7ef28294d0d83e93ea84"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.564513 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb" event={"ID":"1f715b04-a647-46de-8588-bff20df5820f","Type":"ContainerDied","Data":"7f4170462f039fc3201b2673839d9726a019c97a145a0e5b6511ffaabe4f8e77"}
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.564566 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.583417 4672 scope.go:117] "RemoveContainer" containerID="452b3bbe9b67bc4733f105dea725f73ca3c05aadb302112f62b7d45eda976ec5"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.596495 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.599003 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rsb29"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.608128 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.612329 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxhdb"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883414 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"]
Oct 07 15:01:09 crc kubenswrapper[4672]: E1007 15:01:09.883644 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883658 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: E1007 15:01:09.883670 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dac1f5d-9a0e-469d-9072-c200b51d991a" containerName="collect-profiles"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883677 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dac1f5d-9a0e-469d-9072-c200b51d991a" containerName="collect-profiles"
Oct 07 15:01:09 crc kubenswrapper[4672]: E1007 15:01:09.883693 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f715b04-a647-46de-8588-bff20df5820f" containerName="route-controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883701 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f715b04-a647-46de-8588-bff20df5820f" containerName="route-controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883784 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f715b04-a647-46de-8588-bff20df5820f" containerName="route-controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883793 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dac1f5d-9a0e-469d-9072-c200b51d991a" containerName="collect-profiles"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.883806 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" containerName="controller-manager"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.884262 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.887390 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.887783 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.888005 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.888265 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.888416 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.888468 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.889327 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.889537 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.894547 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.894852 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.895673 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.896215 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.897996 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f715b04-a647-46de-8588-bff20df5820f" path="/var/lib/kubelet/pods/1f715b04-a647-46de-8588-bff20df5820f/volumes"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.898197 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.898730 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46244c53-c30f-4b0d-9e2f-873bc7c7c660" path="/var/lib/kubelet/pods/46244c53-c30f-4b0d-9e2f-873bc7c7c660/volumes"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.902919 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.903116 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.905965 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"]
Oct 07 15:01:09 crc kubenswrapper[4672]: I1007 15:01:09.909503 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011518 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf6x5\" (UniqueName: \"kubernetes.io/projected/1a31df51-b439-4711-bbc6-98a5cd941e34-kube-api-access-lf6x5\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011589 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/263a16d0-1124-4aa4-965c-c09f9c18a195-serving-cert\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011619 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6drr\" (UniqueName: \"kubernetes.io/projected/263a16d0-1124-4aa4-965c-c09f9c18a195-kube-api-access-s6drr\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011641 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-client-ca\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011660 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-proxy-ca-bundles\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011833 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-client-ca\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011919 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a31df51-b439-4711-bbc6-98a5cd941e34-serving-cert\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.011985 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-config\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.012119 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-config\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114593 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/263a16d0-1124-4aa4-965c-c09f9c18a195-serving-cert\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114643 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6drr\" (UniqueName: \"kubernetes.io/projected/263a16d0-1124-4aa4-965c-c09f9c18a195-kube-api-access-s6drr\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114668 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-client-ca\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114690 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-proxy-ca-bundles\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114713 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-client-ca\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114737 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a31df51-b439-4711-bbc6-98a5cd941e34-serving-cert\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114764 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-config\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114792 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-config\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.114813 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf6x5\" (UniqueName: \"kubernetes.io/projected/1a31df51-b439-4711-bbc6-98a5cd941e34-kube-api-access-lf6x5\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.116156 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-client-ca\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.116269 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-client-ca\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.116424 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a31df51-b439-4711-bbc6-98a5cd941e34-config\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.116666 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-proxy-ca-bundles\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.117811 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/263a16d0-1124-4aa4-965c-c09f9c18a195-config\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.122358 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/263a16d0-1124-4aa4-965c-c09f9c18a195-serving-cert\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.129251 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a31df51-b439-4711-bbc6-98a5cd941e34-serving-cert\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.131591 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6drr\" (UniqueName: \"kubernetes.io/projected/263a16d0-1124-4aa4-965c-c09f9c18a195-kube-api-access-s6drr\") pod \"controller-manager-75ddd988bc-4wdjs\" (UID: \"263a16d0-1124-4aa4-965c-c09f9c18a195\") " pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.133868 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf6x5\" (UniqueName: \"kubernetes.io/projected/1a31df51-b439-4711-bbc6-98a5cd941e34-kube-api-access-lf6x5\") pod \"route-controller-manager-75645c5677-dxg8s\" (UID: \"1a31df51-b439-4711-bbc6-98a5cd941e34\") " pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.203102 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.213471 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.454610 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"]
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.494840 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"]
Oct 07 15:01:10 crc kubenswrapper[4672]: W1007 15:01:10.503948 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod263a16d0_1124_4aa4_965c_c09f9c18a195.slice/crio-6d4016f5a8ce75396641c439d101951c14eba0efd6754d9da8bc16c37706885c WatchSource:0}: Error finding container 6d4016f5a8ce75396641c439d101951c14eba0efd6754d9da8bc16c37706885c: Status 404 returned error can't find the container with id 6d4016f5a8ce75396641c439d101951c14eba0efd6754d9da8bc16c37706885c
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.569564 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs" event={"ID":"263a16d0-1124-4aa4-965c-c09f9c18a195","Type":"ContainerStarted","Data":"6d4016f5a8ce75396641c439d101951c14eba0efd6754d9da8bc16c37706885c"}
Oct 07 15:01:10 crc kubenswrapper[4672]: I1007 15:01:10.574808 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s" event={"ID":"1a31df51-b439-4711-bbc6-98a5cd941e34","Type":"ContainerStarted","Data":"787104d1115c54b563464d1eae5278e27b72109967d79b97bacbcb396a08b13c"}
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.581775 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs" event={"ID":"263a16d0-1124-4aa4-965c-c09f9c18a195","Type":"ContainerStarted","Data":"375606dd177e78190da37f5d3eb2251759b007a066991ca1ec92eab3d9fdf41e"}
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.582459 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.583367 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s" event={"ID":"1a31df51-b439-4711-bbc6-98a5cd941e34","Type":"ContainerStarted","Data":"eeb62734b7e808629373ae325fcb005a752713409cf79a7639ffd68487c28297"}
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.584003 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.590354 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs"
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.591131 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s"
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.602705 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-75ddd988bc-4wdjs" podStartSLOduration=2.602685561 podStartE2EDuration="2.602685561s" podCreationTimestamp="2025-10-07 15:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:01:11.602249789 +0000 UTC m=+748.577428370" watchObservedRunningTime="2025-10-07 15:01:11.602685561 +0000 UTC m=+748.577864142"
Oct 07 15:01:11 crc kubenswrapper[4672]: I1007 15:01:11.620971 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75645c5677-dxg8s" podStartSLOduration=2.620954562 podStartE2EDuration="2.620954562s" podCreationTimestamp="2025-10-07 15:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:01:11.617442322 +0000 UTC m=+748.592620923" watchObservedRunningTime="2025-10-07 15:01:11.620954562 +0000 UTC m=+748.596133143"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.153635 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.154895 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.156620 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.156776 4672 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-g42g6"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.157506 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.162571 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vbw2d"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.163714 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-vbw2d"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.165235 4672 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-xvvc5"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.187525 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xccp8"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.188267 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.189984 4672 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-mvgzv"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.191496 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vbw2d"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.213869 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xccp8"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.213926 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.289339 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6txw\" (UniqueName: \"kubernetes.io/projected/303de91b-56fc-4579-b771-882f6ec5a53d-kube-api-access-d6txw\") pod \"cert-manager-webhook-5655c58dd6-xccp8\" (UID: \"303de91b-56fc-4579-b771-882f6ec5a53d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.289406 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq8bv\" (UniqueName: \"kubernetes.io/projected/9d1fa281-ffab-4623-a2cd-a5197c100d6c-kube-api-access-vq8bv\") pod \"cert-manager-5b446d88c5-vbw2d\" (UID: \"9d1fa281-ffab-4623-a2cd-a5197c100d6c\") " pod="cert-manager/cert-manager-5b446d88c5-vbw2d"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.289445 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4kxq\" (UniqueName: \"kubernetes.io/projected/ce160d50-48d0-433f-924a-7f6e08afbb0b-kube-api-access-h4kxq\") pod \"cert-manager-cainjector-7f985d654d-4dzmr\" (UID: \"ce160d50-48d0-433f-924a-7f6e08afbb0b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.391087 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4kxq\" (UniqueName: \"kubernetes.io/projected/ce160d50-48d0-433f-924a-7f6e08afbb0b-kube-api-access-h4kxq\") pod \"cert-manager-cainjector-7f985d654d-4dzmr\" (UID: \"ce160d50-48d0-433f-924a-7f6e08afbb0b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.391144 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6txw\" (UniqueName: \"kubernetes.io/projected/303de91b-56fc-4579-b771-882f6ec5a53d-kube-api-access-d6txw\") pod \"cert-manager-webhook-5655c58dd6-xccp8\" (UID: \"303de91b-56fc-4579-b771-882f6ec5a53d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.391205 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq8bv\" (UniqueName: \"kubernetes.io/projected/9d1fa281-ffab-4623-a2cd-a5197c100d6c-kube-api-access-vq8bv\") pod \"cert-manager-5b446d88c5-vbw2d\" (UID: \"9d1fa281-ffab-4623-a2cd-a5197c100d6c\") " pod="cert-manager/cert-manager-5b446d88c5-vbw2d"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.416031 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq8bv\" (UniqueName: \"kubernetes.io/projected/9d1fa281-ffab-4623-a2cd-a5197c100d6c-kube-api-access-vq8bv\") pod \"cert-manager-5b446d88c5-vbw2d\" (UID: \"9d1fa281-ffab-4623-a2cd-a5197c100d6c\") " pod="cert-manager/cert-manager-5b446d88c5-vbw2d"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.416050 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6txw\" (UniqueName: \"kubernetes.io/projected/303de91b-56fc-4579-b771-882f6ec5a53d-kube-api-access-d6txw\") pod \"cert-manager-webhook-5655c58dd6-xccp8\" (UID: \"303de91b-56fc-4579-b771-882f6ec5a53d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.417227 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4kxq\" (UniqueName: \"kubernetes.io/projected/ce160d50-48d0-433f-924a-7f6e08afbb0b-kube-api-access-h4kxq\") pod \"cert-manager-cainjector-7f985d654d-4dzmr\" (UID: \"ce160d50-48d0-433f-924a-7f6e08afbb0b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.476588 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.488256 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-vbw2d"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.502420 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.896478 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-4dzmr"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.900827 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vbw2d"]
Oct 07 15:01:16 crc kubenswrapper[4672]: I1007 15:01:16.910535 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 07 15:01:17 crc kubenswrapper[4672]: I1007 15:01:17.013489 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xccp8"]
Oct 07 15:01:17 crc kubenswrapper[4672]: W1007 15:01:17.019520 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod303de91b_56fc_4579_b771_882f6ec5a53d.slice/crio-1f7caa6899c981ca4ac82318d3579244aa4c11e61ce052f9201f8594ee3f99b8 WatchSource:0}: Error finding container 1f7caa6899c981ca4ac82318d3579244aa4c11e61ce052f9201f8594ee3f99b8: Status 404 returned error can't find the container with id 1f7caa6899c981ca4ac82318d3579244aa4c11e61ce052f9201f8594ee3f99b8
Oct 07 15:01:17 crc kubenswrapper[4672]: I1007 15:01:17.623777 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-vbw2d" event={"ID":"9d1fa281-ffab-4623-a2cd-a5197c100d6c","Type":"ContainerStarted","Data":"0e96acd66d13dcc8a22b698f652cc7ff8d1451380a12e8c52d3937805f30cd9b"}
Oct 07 15:01:17 crc kubenswrapper[4672]: I1007 15:01:17.624979 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr" event={"ID":"ce160d50-48d0-433f-924a-7f6e08afbb0b","Type":"ContainerStarted","Data":"46234f72a42ac34de1d74f5cf3ccd7e7b1f82751d27f9f52093598b4ee187764"}
Oct 07 15:01:17 crc kubenswrapper[4672]: I1007 15:01:17.625752 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8" event={"ID":"303de91b-56fc-4579-b771-882f6ec5a53d","Type":"ContainerStarted","Data":"1f7caa6899c981ca4ac82318d3579244aa4c11e61ce052f9201f8594ee3f99b8"}
Oct 07 15:01:19 crc kubenswrapper[4672]: I1007 15:01:19.247599 4672 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 07 15:01:19 crc kubenswrapper[4672]: I1007 15:01:19.639309 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8" event={"ID":"303de91b-56fc-4579-b771-882f6ec5a53d","Type":"ContainerStarted","Data":"a0e80f93e369836108c75e6d6a67009483360ab17d0ca2fcf174eed7a3409bd0"}
Oct 07 15:01:19 crc kubenswrapper[4672]: I1007 15:01:19.639477 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:19 crc kubenswrapper[4672]: I1007 15:01:19.656266 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8" podStartSLOduration=1.5476473149999999 podStartE2EDuration="3.656248463s" podCreationTimestamp="2025-10-07 15:01:16 +0000 UTC" firstStartedPulling="2025-10-07 15:01:17.021638325 +0000 UTC m=+753.996816906" lastFinishedPulling="2025-10-07 15:01:19.130239473 +0000 UTC m=+756.105418054" observedRunningTime="2025-10-07 15:01:19.652087714 +0000 UTC m=+756.627266305" watchObservedRunningTime="2025-10-07 15:01:19.656248463 +0000 UTC m=+756.631427044"
Oct 07 15:01:20 crc kubenswrapper[4672]: I1007 15:01:20.647774 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-vbw2d" event={"ID":"9d1fa281-ffab-4623-a2cd-a5197c100d6c","Type":"ContainerStarted","Data":"23e79807e46d7714ae43cfe84c0073bbaac23f3dab8b7213ac5a9928de04327d"}
Oct 07 15:01:20 crc kubenswrapper[4672]: I1007 15:01:20.650947 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr" event={"ID":"ce160d50-48d0-433f-924a-7f6e08afbb0b","Type":"ContainerStarted","Data":"49f966f57d4728e7d1745c6e1d1e5b51b855150bf81e5c5fa959a267c888aa6b"}
Oct 07 15:01:20 crc kubenswrapper[4672]: I1007 15:01:20.665156 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-vbw2d" podStartSLOduration=1.060634317 podStartE2EDuration="4.665137392s" podCreationTimestamp="2025-10-07 15:01:16 +0000 UTC" firstStartedPulling="2025-10-07 15:01:16.91030048 +0000 UTC m=+753.885479061" lastFinishedPulling="2025-10-07 15:01:20.514803555 +0000 UTC m=+757.489982136" observedRunningTime="2025-10-07 15:01:20.659846851 +0000 UTC m=+757.635025432" watchObservedRunningTime="2025-10-07 15:01:20.665137392 +0000 UTC m=+757.640315973"
Oct 07 15:01:20 crc kubenswrapper[4672]: I1007 15:01:20.681998 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-4dzmr" podStartSLOduration=1.166549498 podStartE2EDuration="4.681978822s" podCreationTimestamp="2025-10-07 15:01:16 +0000 UTC" firstStartedPulling="2025-10-07 15:01:16.910868937 +0000 UTC m=+753.886047518" lastFinishedPulling="2025-10-07 15:01:20.426298261 +0000 UTC m=+757.401476842" observedRunningTime="2025-10-07 15:01:20.677731421 +0000 UTC m=+757.652910002" watchObservedRunningTime="2025-10-07 15:01:20.681978822 +0000 UTC m=+757.657157393"
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.505251 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-xccp8"
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.650479 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.650540 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.828642 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bqr7"]
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.828972 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-controller" containerID="cri-o://78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" gracePeriod=30
Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.828982 4672
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="nbdb" containerID="cri-o://53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.829100 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="sbdb" containerID="cri-o://b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.829092 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="northd" containerID="cri-o://808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.829121 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.829149 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-node" containerID="cri-o://b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.829155 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-acl-logging" containerID="cri-o://7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" gracePeriod=30 Oct 07 15:01:26 crc kubenswrapper[4672]: I1007 15:01:26.865412 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" containerID="cri-o://85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" gracePeriod=30 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.105865 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/3.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.108121 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovn-acl-logging/0.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.108617 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovn-controller/0.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.109004 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.142003 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.142093 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.142157 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.142239 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143141 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143187 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143217 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143249 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143260 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143279 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143293 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log" (OuterVolumeSpecName: "node-log") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143316 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sps29\" (UniqueName: \"kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143322 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash" (OuterVolumeSpecName: "host-slash") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143346 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143372 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143375 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143418 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143434 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143473 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143495 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143516 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143541 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143562 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket" (OuterVolumeSpecName: "log-socket") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143578 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143598 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143628 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143633 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143680 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143714 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143748 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert\") pod \"be16af59-c2d0-4922-803f-bf1544dd0973\" (UID: \"be16af59-c2d0-4922-803f-bf1544dd0973\") " Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143802 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143838 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143875 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143928 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.143982 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144135 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144209 4672 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144232 4672 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144244 4672 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144253 4672 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144264 4672 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144272 4672 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144284 4672 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144292 4672 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-node-log\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144301 4672 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144311 4672 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-slash\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144319 4672 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144327 4672 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-log-socket\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144337 4672 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144347 4672 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144358 4672 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144369 4672 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.144208 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.149077 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.149432 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29" (OuterVolumeSpecName: "kube-api-access-sps29") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "kube-api-access-sps29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.157642 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-v8nh4"] Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.157913 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.157950 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.157958 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="sbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.157964 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="sbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.157975 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="northd" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.157982 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="northd" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.157989 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kubecfg-setup" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.157994 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kubecfg-setup" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158004 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158043 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158052 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-acl-logging" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158059 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-acl-logging" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158068 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158075 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158083 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-node" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158090 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-node" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158114 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" 
containerName="ovn-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158122 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158135 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158140 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158148 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="nbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158154 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="nbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158239 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="nbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158249 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-node" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158256 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158262 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-acl-logging" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158269 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158276 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158284 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158293 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="northd" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158300 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158306 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="sbdb" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158316 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovn-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158397 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158404 4672 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158503 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.158580 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.158587 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" containerName="ovnkube-controller" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.170134 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "be16af59-c2d0-4922-803f-bf1544dd0973" (UID: "be16af59-c2d0-4922-803f-bf1544dd0973"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.170513 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245154 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-script-lib\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245376 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovn-node-metrics-cert\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245488 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-log-socket\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245571 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-bin\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245666 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245744 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-systemd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245829 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-slash\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245904 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-node-log\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.245969 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-ovn\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246037 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-kubelet\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246143 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmzh\" (UniqueName: \"kubernetes.io/projected/e5344f6e-0400-4e9f-ac50-2644c67c0a13-kube-api-access-ksmzh\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246218 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246297 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-systemd-units\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246367 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-netd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246465 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246555 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-etc-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246626 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-var-lib-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246693 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-config\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246752 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-env-overrides\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246819 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-netns\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246902 4672 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/be16af59-c2d0-4922-803f-bf1544dd0973-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.246964 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sps29\" (UniqueName: \"kubernetes.io/projected/be16af59-c2d0-4922-803f-bf1544dd0973-kube-api-access-sps29\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.247021 4672 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/be16af59-c2d0-4922-803f-bf1544dd0973-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.247092 4672 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/be16af59-c2d0-4922-803f-bf1544dd0973-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347536 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-systemd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347573 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-slash\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347590 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-node-log\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347605 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-ovn\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347618 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-kubelet\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347637 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmzh\" (UniqueName: \"kubernetes.io/projected/e5344f6e-0400-4e9f-ac50-2644c67c0a13-kube-api-access-ksmzh\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347655 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347665 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-systemd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347695 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-systemd-units\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347675 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-systemd-units\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347712 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-kubelet\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347724 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-netd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347735 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-node-log\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347741 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347756 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-ovn\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347760 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-etc-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347775 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-etc-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347782 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-var-lib-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347801 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-config\") pod 
\"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347814 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-env-overrides\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347827 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-netns\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347847 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-script-lib\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347863 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovn-node-metrics-cert\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347881 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-log-socket\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347905 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-bin\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347925 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347970 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-run-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.347992 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-var-lib-openvswitch\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348011 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348019 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-netd\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348089 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348230 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-log-socket\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348270 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-slash\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348311 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-cni-bin\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348379 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5344f6e-0400-4e9f-ac50-2644c67c0a13-host-run-netns\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348763 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-env-overrides\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348787 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-config\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.348811 4672 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovnkube-script-lib\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.351561 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e5344f6e-0400-4e9f-ac50-2644c67c0a13-ovn-node-metrics-cert\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.364403 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmzh\" (UniqueName: \"kubernetes.io/projected/e5344f6e-0400-4e9f-ac50-2644c67c0a13-kube-api-access-ksmzh\") pod \"ovnkube-node-v8nh4\" (UID: \"e5344f6e-0400-4e9f-ac50-2644c67c0a13\") " pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.504265 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:27 crc kubenswrapper[4672]: W1007 15:01:27.520142 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5344f6e_0400_4e9f_ac50_2644c67c0a13.slice/crio-9dec422c3f87e22d1d198fa858080e71abd5e7d3ba654b9add8e8a2eacabf356 WatchSource:0}: Error finding container 9dec422c3f87e22d1d198fa858080e71abd5e7d3ba654b9add8e8a2eacabf356: Status 404 returned error can't find the container with id 9dec422c3f87e22d1d198fa858080e71abd5e7d3ba654b9add8e8a2eacabf356 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.684980 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovnkube-controller/3.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.687935 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovn-acl-logging/0.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.688563 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bqr7_be16af59-c2d0-4922-803f-bf1544dd0973/ovn-controller/0.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.688965 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" exitCode=0 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.688994 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" exitCode=0 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689005 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" exitCode=0 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689012 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" exitCode=0 Oct 07 
15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689020 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" exitCode=0 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689030 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" exitCode=0 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689056 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" exitCode=143 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689050 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689117 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689135 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689066 4672 generic.go:334] "Generic (PLEG): container finished" podID="be16af59-c2d0-4922-803f-bf1544dd0973" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" exitCode=143 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689120 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689327 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689383 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689403 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689416 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689429 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 
15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689441 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689473 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689481 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689487 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689493 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689501 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689507 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689513 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689526 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689562 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689571 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689577 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689584 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689591 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 
15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689597 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689603 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689609 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689619 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689649 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689661 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689674 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689684 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689691 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689698 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689731 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689740 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689748 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689763 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 
15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689770 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689777 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689791 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bqr7" event={"ID":"be16af59-c2d0-4922-803f-bf1544dd0973","Type":"ContainerDied","Data":"1a5bc11e36d8181a783611345a15c4004812c1758a0a8453ad3d1ae6c6ab0ed6"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689826 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689835 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689843 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689849 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689856 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689862 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689868 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689877 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689907 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.689913 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.692209 4672 generic.go:334] "Generic (PLEG): container finished" podID="e5344f6e-0400-4e9f-ac50-2644c67c0a13" containerID="022530353dc65b177e001159137b3c20fd13f82c7365f9a690e4f85084c7a659" exitCode=0 
Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.692280 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerDied","Data":"022530353dc65b177e001159137b3c20fd13f82c7365f9a690e4f85084c7a659"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.692320 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"9dec422c3f87e22d1d198fa858080e71abd5e7d3ba654b9add8e8a2eacabf356"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.694195 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/2.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.694739 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/1.log" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.694838 4672 generic.go:334] "Generic (PLEG): container finished" podID="a0a0e29e-f4b1-4573-b5a7-3dc297f92a62" containerID="b55b4513fc3f63daea26199ada496c4865e05f40aa4e1fbf2129675e324d5a0c" exitCode=2 Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.694879 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerDied","Data":"b55b4513fc3f63daea26199ada496c4865e05f40aa4e1fbf2129675e324d5a0c"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.694931 4672 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925"} Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.695412 4672 scope.go:117] "RemoveContainer" containerID="b55b4513fc3f63daea26199ada496c4865e05f40aa4e1fbf2129675e324d5a0c" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.749827 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.778076 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bqr7"] Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.781372 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bqr7"] Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.781533 4672 scope.go:117] "RemoveContainer" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.796792 4672 scope.go:117] "RemoveContainer" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.820348 4672 scope.go:117] "RemoveContainer" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.836999 4672 scope.go:117] "RemoveContainer" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.849161 4672 scope.go:117] "RemoveContainer" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.865883 4672 scope.go:117] 
"RemoveContainer" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.876719 4672 scope.go:117] "RemoveContainer" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.897650 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be16af59-c2d0-4922-803f-bf1544dd0973" path="/var/lib/kubelet/pods/be16af59-c2d0-4922-803f-bf1544dd0973/volumes" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.897862 4672 scope.go:117] "RemoveContainer" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.915930 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.917261 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.917296 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} err="failed to get container status \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.917322 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.917722 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": container with ID starting with 3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918 not found: ID does not exist" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.917766 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} err="failed to get container status \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": rpc error: code = NotFound desc = could not find container \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": container with ID starting with 3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.917809 4672 scope.go:117] "RemoveContainer" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.918297 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": container with ID starting with b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845 not found: ID does not exist" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.918352 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} err="failed to get container status \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": rpc error: code = NotFound desc = could not find container \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": container with ID starting with b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.918374 4672 scope.go:117] "RemoveContainer" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.918762 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": container with ID starting with 53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b not found: ID does not exist" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.918803 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} err="failed to get container status \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": rpc error: code = NotFound desc = could not find container \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": container with ID starting with 53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.918833 4672 scope.go:117] "RemoveContainer" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.919196 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": container with ID starting with 808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49 not found: ID does not exist" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.919323 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} err="failed to get container status \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": rpc error: code = NotFound desc = could not find container \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": container with ID starting with 808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.919346 4672 scope.go:117] "RemoveContainer" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc 
kubenswrapper[4672]: E1007 15:01:27.919711 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": container with ID starting with e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a not found: ID does not exist" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.919752 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} err="failed to get container status \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": rpc error: code = NotFound desc = could not find container \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": container with ID starting with e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.919803 4672 scope.go:117] "RemoveContainer" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.920079 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": container with ID starting with b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a not found: ID does not exist" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920102 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} err="failed to get container status \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": rpc error: code = NotFound desc = could not find container \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": container with ID starting with b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920118 4672 scope.go:117] "RemoveContainer" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.920338 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": container with ID starting with 7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7 not found: ID does not exist" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920383 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} err="failed to get container status \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": rpc error: code = NotFound desc = could not find container \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": container with ID starting with 7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: 
I1007 15:01:27.920398 4672 scope.go:117] "RemoveContainer" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.920714 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": container with ID starting with 78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669 not found: ID does not exist" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920739 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} err="failed to get container status \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": rpc error: code = NotFound desc = could not find container \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": container with ID starting with 78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920754 4672 scope.go:117] "RemoveContainer" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: E1007 15:01:27.920943 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": container with ID starting with 271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716 not found: ID does not exist" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920972 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} err="failed to get container status \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": rpc error: code = NotFound desc = could not find container \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": container with ID starting with 271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.920991 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921411 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} err="failed to get container status \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921433 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921639 4672 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} err="failed to get container status \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": rpc error: code = NotFound desc = could not find container \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": container with ID starting with 3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921663 4672 scope.go:117] "RemoveContainer" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921896 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} err="failed to get container status \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": rpc error: code = NotFound desc = could not find container \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": container with ID starting with b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.921943 4672 scope.go:117] "RemoveContainer" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922197 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} err="failed to get container status \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": rpc error: code = NotFound desc = could not find container \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": container with ID starting with 53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922219 4672 scope.go:117] "RemoveContainer" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922495 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} err="failed to get container status \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": rpc error: code = NotFound desc = could not find container \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": container with ID starting with 808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922556 4672 scope.go:117] "RemoveContainer" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922791 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} err="failed to get container status \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": rpc error: code = NotFound desc = could not find container \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": container with ID starting with e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a not found: ID does not exist" Oct 
07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.922838 4672 scope.go:117] "RemoveContainer" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923049 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} err="failed to get container status \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": rpc error: code = NotFound desc = could not find container \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": container with ID starting with b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923067 4672 scope.go:117] "RemoveContainer" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923284 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} err="failed to get container status \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": rpc error: code = NotFound desc = could not find container \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": container with ID starting with 7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923303 4672 scope.go:117] "RemoveContainer" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923522 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} err="failed to get container status \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": rpc error: code = NotFound desc = could not find container \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": container with ID starting with 78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923540 4672 scope.go:117] "RemoveContainer" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923738 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} err="failed to get container status \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": rpc error: code = NotFound desc = could not find container \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": container with ID starting with 271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.923761 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.924184 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} err="failed to get container status 
\"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.924220 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.924502 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} err="failed to get container status \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": rpc error: code = NotFound desc = could not find container \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": container with ID starting with 3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.924521 4672 scope.go:117] "RemoveContainer" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925315 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} err="failed to get container status \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": rpc error: code = NotFound desc = could not find container \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": container with ID starting with b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925369 4672 scope.go:117] "RemoveContainer" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925610 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} err="failed to get container status \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": rpc error: code = NotFound desc = could not find container \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": container with ID starting with 53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925641 4672 scope.go:117] "RemoveContainer" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925953 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} err="failed to get container status \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": rpc error: code = NotFound desc = could not find container \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": container with ID starting with 808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.925984 4672 scope.go:117] "RemoveContainer" 
containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926202 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} err="failed to get container status \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": rpc error: code = NotFound desc = could not find container \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": container with ID starting with e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926222 4672 scope.go:117] "RemoveContainer" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926477 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} err="failed to get container status \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": rpc error: code = NotFound desc = could not find container \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": container with ID starting with b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926504 4672 scope.go:117] "RemoveContainer" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926709 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} err="failed to get container status \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": rpc error: code = NotFound desc = could not find container \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": container with ID starting with 7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.926729 4672 scope.go:117] "RemoveContainer" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.927202 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} err="failed to get container status \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": rpc error: code = NotFound desc = could not find container \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": container with ID starting with 78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.927240 4672 scope.go:117] "RemoveContainer" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.927761 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} err="failed to get container status \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": rpc error: code = NotFound desc = could not find 
container \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": container with ID starting with 271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.927805 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928143 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} err="failed to get container status \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928220 4672 scope.go:117] "RemoveContainer" containerID="3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928516 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918"} err="failed to get container status \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": rpc error: code = NotFound desc = could not find container \"3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918\": container with ID starting with 3faa32c778a550fae25def55e7fba49ceb91b2bb28932d4f1c054ab879f69918 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928542 4672 scope.go:117] "RemoveContainer" containerID="b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928939 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845"} err="failed to get container status \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": rpc error: code = NotFound desc = could not find container \"b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845\": container with ID starting with b9ccd999218ab771c80d7f28c3039bb52077265730d05c60ccb5415f6902b845 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.928967 4672 scope.go:117] "RemoveContainer" containerID="53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.929525 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b"} err="failed to get container status \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": rpc error: code = NotFound desc = could not find container \"53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b\": container with ID starting with 53f278a083da8e154081609e22146199e555b2ee5933e59f2158fa634bfb922b not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.929552 4672 scope.go:117] "RemoveContainer" containerID="808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.930067 4672 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49"} err="failed to get container status \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": rpc error: code = NotFound desc = could not find container \"808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49\": container with ID starting with 808bc7f2b51c20311a2356e08a7760ad0ff9aa8c3fa3f2ee1bd444161e176a49 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.930118 4672 scope.go:117] "RemoveContainer" containerID="e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.930682 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a"} err="failed to get container status \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": rpc error: code = NotFound desc = could not find container \"e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a\": container with ID starting with e65b95b4bea3f8c30dfce44ff1ab3577b50cd00772012e1f6879d3f52b04f01a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.930730 4672 scope.go:117] "RemoveContainer" containerID="b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931117 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a"} err="failed to get container status \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": rpc error: code = NotFound desc = could not find container \"b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a\": container with ID starting with b0d3ca9f99f0ad85e95c9f6429c3ef6afebe7c069f6dc55b30cfcbdc819f670a not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931147 4672 scope.go:117] "RemoveContainer" containerID="7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931490 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7"} err="failed to get container status \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": rpc error: code = NotFound desc = could not find container \"7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7\": container with ID starting with 7db2e5733818eac5fd991c50fcfb30924ecef0e4c85fb05abf34ce7b249bdcc7 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931519 4672 scope.go:117] "RemoveContainer" containerID="78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931867 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669"} err="failed to get container status \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": rpc error: code = NotFound desc = could not find container \"78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669\": container with ID starting with 
78cc55205bd192cf711270d3a2b1ea4f924099c7104697d953281dfd9db0d669 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.931914 4672 scope.go:117] "RemoveContainer" containerID="271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.932145 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716"} err="failed to get container status \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": rpc error: code = NotFound desc = could not find container \"271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716\": container with ID starting with 271cae0675384b5ff489293422da1ee3198cd58bb0853ec0cd613f45fae16716 not found: ID does not exist" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.932190 4672 scope.go:117] "RemoveContainer" containerID="85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca" Oct 07 15:01:27 crc kubenswrapper[4672]: I1007 15:01:27.932395 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca"} err="failed to get container status \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": rpc error: code = NotFound desc = could not find container \"85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca\": container with ID starting with 85c4b86266fb27a5425490a485f22b9c57889dc58e24b0aa0420ea9c766d93ca not found: ID does not exist" Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703278 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"9a9815d30980769a05990b7a8ce719c87aa8488808baa95d7310a8a5798cef0d"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703635 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"ba012d481bb70b042cc726a85c927b0873e47bf53666e41ca7cd858a1fae054e"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703655 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"fa43c1247e928f6a5e585fcb0d32d31517ad79dfb76c29990127ea2737158167"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703666 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"868320b4b6dba15dcd28229eba9e3dc41a0dbc9c9d9e792166769963681a7cae"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703677 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"b30131f5afee83fbd4841a30d38cb14bf66a0606788bd492ae3d78c1669e1e0c"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.703688 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" 
event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"e4f7e24c7b56bf17a719cfdeae0e9c4e98cb51ab011a3571dcd57e53726a0312"} Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.705291 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/2.log" Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.705722 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/1.log" Oct 07 15:01:28 crc kubenswrapper[4672]: I1007 15:01:28.705811 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l8k8z" event={"ID":"a0a0e29e-f4b1-4573-b5a7-3dc297f92a62","Type":"ContainerStarted","Data":"f9dce9b3f2d8bd8f3275bab2f9b5d0cef44aea9b60f86af6e07d2dca4721e43c"} Oct 07 15:01:30 crc kubenswrapper[4672]: I1007 15:01:30.721599 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"c57a46c408500d2b3637fb45e80351c8b4c2645f88bb80db46051a069be54a07"} Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.737949 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" event={"ID":"e5344f6e-0400-4e9f-ac50-2644c67c0a13","Type":"ContainerStarted","Data":"09b36128d91868fe7131714a43f538eb4194580070715f7c3194cf0cf46f9312"} Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.738499 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.738515 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.738524 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.765319 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.766042 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" podStartSLOduration=6.766006281 podStartE2EDuration="6.766006281s" podCreationTimestamp="2025-10-07 15:01:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:01:33.763483829 +0000 UTC m=+770.738662460" watchObservedRunningTime="2025-10-07 15:01:33.766006281 +0000 UTC m=+770.741184872" Oct 07 15:01:33 crc kubenswrapper[4672]: I1007 15:01:33.768841 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:44 crc kubenswrapper[4672]: I1007 15:01:44.071787 4672 scope.go:117] "RemoveContainer" containerID="a0b78c063f3a3439d3032010e2cf4bbb459e0bfcca879412c5c70d59ee38d925" Oct 07 15:01:44 crc kubenswrapper[4672]: I1007 15:01:44.802491 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l8k8z_a0a0e29e-f4b1-4573-b5a7-3dc297f92a62/kube-multus/2.log" Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.650626 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.651180 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.651235 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.651776 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.651830 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb" gracePeriod=600 Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.861845 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb" exitCode=0 Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.861934 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb"} Oct 07 15:01:56 crc kubenswrapper[4672]: I1007 15:01:56.862396 4672 scope.go:117] "RemoveContainer" containerID="cdc0c1ec78915bfb28078432a0c6c8760aad745b9a483755c11b6137150bc91e" Oct 07 15:01:57 crc kubenswrapper[4672]: I1007 15:01:57.534635 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v8nh4" Oct 07 15:01:57 crc kubenswrapper[4672]: I1007 15:01:57.870550 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56"} Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.672683 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm"] Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.675492 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.677099 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.682856 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm"] Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.818528 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djr6t\" (UniqueName: \"kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.818931 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.819293 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.920330 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.920653 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.920790 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djr6t\" (UniqueName: \"kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.920990 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.921166 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.939560 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djr6t\" (UniqueName: \"kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:05 crc kubenswrapper[4672]: I1007 15:02:05.999759 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:06 crc kubenswrapper[4672]: I1007 15:02:06.382350 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm"] Oct 07 15:02:06 crc kubenswrapper[4672]: I1007 15:02:06.929882 4672 generic.go:334] "Generic (PLEG): container finished" podID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerID="35c5cebec5c3fedf409eca12d7075da76a8e02c496a8cebc10c91ffc527fa5de" exitCode=0 Oct 07 15:02:06 crc kubenswrapper[4672]: I1007 15:02:06.929991 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" event={"ID":"a8c3719c-2c5b-489c-a1ae-4879a537b65a","Type":"ContainerDied","Data":"35c5cebec5c3fedf409eca12d7075da76a8e02c496a8cebc10c91ffc527fa5de"} Oct 07 15:02:06 crc kubenswrapper[4672]: I1007 15:02:06.930249 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" event={"ID":"a8c3719c-2c5b-489c-a1ae-4879a537b65a","Type":"ContainerStarted","Data":"d2de9c00012b96a96e10a5c72a867282101cfaf2aff9e79cc620e728d3398e92"} Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.034679 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.035845 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.049221 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.148971 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.149357 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czqxh\" (UniqueName: \"kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.149396 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.250795 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czqxh\" (UniqueName: \"kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.250853 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.250895 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.251392 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.251898 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.269256 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-czqxh\" (UniqueName: \"kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh\") pod \"redhat-operators-ms84f\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.350682 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.570426 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.941206 4672 generic.go:334] "Generic (PLEG): container finished" podID="94805e19-68fc-44a0-9402-c3c14498049c" containerID="f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3" exitCode=0 Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.941349 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerDied","Data":"f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3"} Oct 07 15:02:08 crc kubenswrapper[4672]: I1007 15:02:08.941601 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerStarted","Data":"f1da267bca9899337616792ad25ba50c9ad097fe5ee4126864d640aa1c0be526"} Oct 07 15:02:09 crc kubenswrapper[4672]: I1007 15:02:09.949730 4672 generic.go:334] "Generic (PLEG): container finished" podID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerID="4305c6b4105d73d37b93cb5dba1a09b8194598549d09db5202bd6443e4bfba00" exitCode=0 Oct 07 15:02:09 crc kubenswrapper[4672]: I1007 15:02:09.949836 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" event={"ID":"a8c3719c-2c5b-489c-a1ae-4879a537b65a","Type":"ContainerDied","Data":"4305c6b4105d73d37b93cb5dba1a09b8194598549d09db5202bd6443e4bfba00"} Oct 07 15:02:09 crc kubenswrapper[4672]: I1007 15:02:09.952131 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerStarted","Data":"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b"} Oct 07 15:02:10 crc kubenswrapper[4672]: I1007 15:02:10.959739 4672 generic.go:334] "Generic (PLEG): container finished" podID="94805e19-68fc-44a0-9402-c3c14498049c" containerID="127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b" exitCode=0 Oct 07 15:02:10 crc kubenswrapper[4672]: I1007 15:02:10.959835 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerDied","Data":"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b"} Oct 07 15:02:10 crc kubenswrapper[4672]: I1007 15:02:10.966324 4672 generic.go:334] "Generic (PLEG): container finished" podID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerID="c68d54d5bf0e511d6c27ab35e71b049534a0c4ab83375d3c77c159ea3997381c" exitCode=0 Oct 07 15:02:10 crc kubenswrapper[4672]: I1007 15:02:10.966374 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" 
event={"ID":"a8c3719c-2c5b-489c-a1ae-4879a537b65a","Type":"ContainerDied","Data":"c68d54d5bf0e511d6c27ab35e71b049534a0c4ab83375d3c77c159ea3997381c"} Oct 07 15:02:11 crc kubenswrapper[4672]: I1007 15:02:11.973974 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerStarted","Data":"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f"} Oct 07 15:02:11 crc kubenswrapper[4672]: I1007 15:02:11.989966 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ms84f" podStartSLOduration=1.534171056 podStartE2EDuration="3.989948174s" podCreationTimestamp="2025-10-07 15:02:08 +0000 UTC" firstStartedPulling="2025-10-07 15:02:08.94969453 +0000 UTC m=+805.924873111" lastFinishedPulling="2025-10-07 15:02:11.405471648 +0000 UTC m=+808.380650229" observedRunningTime="2025-10-07 15:02:11.988459381 +0000 UTC m=+808.963637962" watchObservedRunningTime="2025-10-07 15:02:11.989948174 +0000 UTC m=+808.965126755" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.204478 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.313949 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djr6t\" (UniqueName: \"kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t\") pod \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.314065 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util\") pod \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.314156 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle\") pod \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\" (UID: \"a8c3719c-2c5b-489c-a1ae-4879a537b65a\") " Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.314834 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle" (OuterVolumeSpecName: "bundle") pod "a8c3719c-2c5b-489c-a1ae-4879a537b65a" (UID: "a8c3719c-2c5b-489c-a1ae-4879a537b65a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.319732 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t" (OuterVolumeSpecName: "kube-api-access-djr6t") pod "a8c3719c-2c5b-489c-a1ae-4879a537b65a" (UID: "a8c3719c-2c5b-489c-a1ae-4879a537b65a"). InnerVolumeSpecName "kube-api-access-djr6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.324787 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util" (OuterVolumeSpecName: "util") pod "a8c3719c-2c5b-489c-a1ae-4879a537b65a" (UID: "a8c3719c-2c5b-489c-a1ae-4879a537b65a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.416088 4672 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.416125 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djr6t\" (UniqueName: \"kubernetes.io/projected/a8c3719c-2c5b-489c-a1ae-4879a537b65a-kube-api-access-djr6t\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.416135 4672 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8c3719c-2c5b-489c-a1ae-4879a537b65a-util\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.985045 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.985136 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm" event={"ID":"a8c3719c-2c5b-489c-a1ae-4879a537b65a","Type":"ContainerDied","Data":"d2de9c00012b96a96e10a5c72a867282101cfaf2aff9e79cc620e728d3398e92"} Oct 07 15:02:12 crc kubenswrapper[4672]: I1007 15:02:12.985163 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2de9c00012b96a96e10a5c72a867282101cfaf2aff9e79cc620e728d3398e92" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.182617 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p47n6"] Oct 07 15:02:17 crc kubenswrapper[4672]: E1007 15:02:17.183354 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="pull" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.183369 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="pull" Oct 07 15:02:17 crc kubenswrapper[4672]: E1007 15:02:17.183382 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="util" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.183389 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="util" Oct 07 15:02:17 crc kubenswrapper[4672]: E1007 15:02:17.183404 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="extract" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.183410 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" containerName="extract" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.183505 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8c3719c-2c5b-489c-a1ae-4879a537b65a" 
containerName="extract" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.183868 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.185778 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.186183 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-kjvgb" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.186348 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.194087 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p47n6"] Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.276202 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6gh2\" (UniqueName: \"kubernetes.io/projected/128e3d01-038d-4b02-91f6-b50124ff721a-kube-api-access-b6gh2\") pod \"nmstate-operator-858ddd8f98-p47n6\" (UID: \"128e3d01-038d-4b02-91f6-b50124ff721a\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.377960 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6gh2\" (UniqueName: \"kubernetes.io/projected/128e3d01-038d-4b02-91f6-b50124ff721a-kube-api-access-b6gh2\") pod \"nmstate-operator-858ddd8f98-p47n6\" (UID: \"128e3d01-038d-4b02-91f6-b50124ff721a\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.401462 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6gh2\" (UniqueName: \"kubernetes.io/projected/128e3d01-038d-4b02-91f6-b50124ff721a-kube-api-access-b6gh2\") pod \"nmstate-operator-858ddd8f98-p47n6\" (UID: \"128e3d01-038d-4b02-91f6-b50124ff721a\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.500797 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" Oct 07 15:02:17 crc kubenswrapper[4672]: I1007 15:02:17.902460 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p47n6"] Oct 07 15:02:18 crc kubenswrapper[4672]: I1007 15:02:18.016421 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" event={"ID":"128e3d01-038d-4b02-91f6-b50124ff721a","Type":"ContainerStarted","Data":"6ccec761d0201a17e99d3219724aa76926e58553c4e5bf55442916a886963f2f"} Oct 07 15:02:18 crc kubenswrapper[4672]: I1007 15:02:18.351855 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:18 crc kubenswrapper[4672]: I1007 15:02:18.352232 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:18 crc kubenswrapper[4672]: I1007 15:02:18.398460 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:19 crc kubenswrapper[4672]: I1007 15:02:19.058504 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:21 crc kubenswrapper[4672]: I1007 15:02:21.039264 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" event={"ID":"128e3d01-038d-4b02-91f6-b50124ff721a","Type":"ContainerStarted","Data":"a47ea7bff2ae754c724304e7c1f144f346435905216f8668f89121d6e65a0240"} Oct 07 15:02:21 crc kubenswrapper[4672]: I1007 15:02:21.056211 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p47n6" podStartSLOduration=1.63640814 podStartE2EDuration="4.056195241s" podCreationTimestamp="2025-10-07 15:02:17 +0000 UTC" firstStartedPulling="2025-10-07 15:02:17.912646402 +0000 UTC m=+814.887824983" lastFinishedPulling="2025-10-07 15:02:20.332433503 +0000 UTC m=+817.307612084" observedRunningTime="2025-10-07 15:02:21.053731761 +0000 UTC m=+818.028910362" watchObservedRunningTime="2025-10-07 15:02:21.056195241 +0000 UTC m=+818.031373822" Oct 07 15:02:21 crc kubenswrapper[4672]: I1007 15:02:21.979695 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:21 crc kubenswrapper[4672]: I1007 15:02:21.980715 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:21 crc kubenswrapper[4672]: I1007 15:02:21.986904 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.039302 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.039380 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.039485 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clbnc\" (UniqueName: \"kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.141124 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.141208 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.141314 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clbnc\" (UniqueName: \"kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.141704 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.141931 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.167207 4672 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-clbnc\" (UniqueName: \"kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc\") pod \"redhat-marketplace-xhdd7\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.168026 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.168241 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ms84f" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="registry-server" containerID="cri-o://9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f" gracePeriod=2 Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.301695 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.573853 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.653413 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities\") pod \"94805e19-68fc-44a0-9402-c3c14498049c\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.653462 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content\") pod \"94805e19-68fc-44a0-9402-c3c14498049c\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.653572 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czqxh\" (UniqueName: \"kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh\") pod \"94805e19-68fc-44a0-9402-c3c14498049c\" (UID: \"94805e19-68fc-44a0-9402-c3c14498049c\") " Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.654657 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities" (OuterVolumeSpecName: "utilities") pod "94805e19-68fc-44a0-9402-c3c14498049c" (UID: "94805e19-68fc-44a0-9402-c3c14498049c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.660506 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh" (OuterVolumeSpecName: "kube-api-access-czqxh") pod "94805e19-68fc-44a0-9402-c3c14498049c" (UID: "94805e19-68fc-44a0-9402-c3c14498049c"). InnerVolumeSpecName "kube-api-access-czqxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.741256 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94805e19-68fc-44a0-9402-c3c14498049c" (UID: "94805e19-68fc-44a0-9402-c3c14498049c"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.756351 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czqxh\" (UniqueName: \"kubernetes.io/projected/94805e19-68fc-44a0-9402-c3c14498049c-kube-api-access-czqxh\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.756421 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.756433 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94805e19-68fc-44a0-9402-c3c14498049c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:22 crc kubenswrapper[4672]: I1007 15:02:22.801306 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:22 crc kubenswrapper[4672]: W1007 15:02:22.810661 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2110a39_1616_410c_ba78_d77bfed7e142.slice/crio-68b2242c8fc2bf6436083aaac9a25492b0f79d729e98a31c339218248d4b328c WatchSource:0}: Error finding container 68b2242c8fc2bf6436083aaac9a25492b0f79d729e98a31c339218248d4b328c: Status 404 returned error can't find the container with id 68b2242c8fc2bf6436083aaac9a25492b0f79d729e98a31c339218248d4b328c Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.052003 4672 generic.go:334] "Generic (PLEG): container finished" podID="b2110a39-1616-410c-ba78-d77bfed7e142" containerID="0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59" exitCode=0 Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.052058 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerDied","Data":"0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59"} Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.052391 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerStarted","Data":"68b2242c8fc2bf6436083aaac9a25492b0f79d729e98a31c339218248d4b328c"} Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.054914 4672 generic.go:334] "Generic (PLEG): container finished" podID="94805e19-68fc-44a0-9402-c3c14498049c" containerID="9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f" exitCode=0 Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.054951 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerDied","Data":"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f"} Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.054974 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms84f" event={"ID":"94805e19-68fc-44a0-9402-c3c14498049c","Type":"ContainerDied","Data":"f1da267bca9899337616792ad25ba50c9ad097fe5ee4126864d640aa1c0be526"} Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.054990 4672 scope.go:117] "RemoveContainer" 
containerID="9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.055101 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ms84f" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.075682 4672 scope.go:117] "RemoveContainer" containerID="127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.090100 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.093505 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ms84f"] Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.117646 4672 scope.go:117] "RemoveContainer" containerID="f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.130760 4672 scope.go:117] "RemoveContainer" containerID="9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f" Oct 07 15:02:23 crc kubenswrapper[4672]: E1007 15:02:23.131263 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f\": container with ID starting with 9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f not found: ID does not exist" containerID="9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.131315 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f"} err="failed to get container status \"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f\": rpc error: code = NotFound desc = could not find container \"9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f\": container with ID starting with 9a31950ba6f0f490b9e87153d068516f548e65545b487123f35a8fc24d621a6f not found: ID does not exist" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.131342 4672 scope.go:117] "RemoveContainer" containerID="127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b" Oct 07 15:02:23 crc kubenswrapper[4672]: E1007 15:02:23.131743 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b\": container with ID starting with 127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b not found: ID does not exist" containerID="127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.131785 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b"} err="failed to get container status \"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b\": rpc error: code = NotFound desc = could not find container \"127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b\": container with ID starting with 127aab314d0545c5221bea8d0aa03ef57521b3b19cbaea134e6701cab39a3b2b not found: ID does not exist" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.131850 4672 scope.go:117] "RemoveContainer" 
containerID="f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3" Oct 07 15:02:23 crc kubenswrapper[4672]: E1007 15:02:23.132403 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3\": container with ID starting with f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3 not found: ID does not exist" containerID="f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.132433 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3"} err="failed to get container status \"f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3\": rpc error: code = NotFound desc = could not find container \"f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3\": container with ID starting with f59ea7ce26af23c7b2df1fade38be3c718210619bdd1d9f35e1bf4f183ad69f3 not found: ID does not exist" Oct 07 15:02:23 crc kubenswrapper[4672]: I1007 15:02:23.900829 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94805e19-68fc-44a0-9402-c3c14498049c" path="/var/lib/kubelet/pods/94805e19-68fc-44a0-9402-c3c14498049c/volumes" Oct 07 15:02:25 crc kubenswrapper[4672]: I1007 15:02:25.079387 4672 generic.go:334] "Generic (PLEG): container finished" podID="b2110a39-1616-410c-ba78-d77bfed7e142" containerID="a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0" exitCode=0 Oct 07 15:02:25 crc kubenswrapper[4672]: I1007 15:02:25.079472 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerDied","Data":"a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0"} Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.085949 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerStarted","Data":"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de"} Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.102334 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xhdd7" podStartSLOduration=2.68143771 podStartE2EDuration="5.102314983s" podCreationTimestamp="2025-10-07 15:02:21 +0000 UTC" firstStartedPulling="2025-10-07 15:02:23.053967448 +0000 UTC m=+820.029146029" lastFinishedPulling="2025-10-07 15:02:25.474844721 +0000 UTC m=+822.450023302" observedRunningTime="2025-10-07 15:02:26.100736948 +0000 UTC m=+823.075915549" watchObservedRunningTime="2025-10-07 15:02:26.102314983 +0000 UTC m=+823.077493564" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.192589 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd"] Oct 07 15:02:26 crc kubenswrapper[4672]: E1007 15:02:26.192806 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="extract-content" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.192818 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="extract-content" Oct 07 15:02:26 crc kubenswrapper[4672]: E1007 15:02:26.192830 4672 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="extract-utilities" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.192836 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="extract-utilities" Oct 07 15:02:26 crc kubenswrapper[4672]: E1007 15:02:26.192856 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="registry-server" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.192863 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="registry-server" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.192967 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="94805e19-68fc-44a0-9402-c3c14498049c" containerName="registry-server" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.193513 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.196535 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zfkbb" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.227473 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.228395 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.231233 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.259452 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-lbdvn"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.260478 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.266324 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.280129 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307027 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307099 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-dbus-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdnqs\" (UniqueName: \"kubernetes.io/projected/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-kube-api-access-hdnqs\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307168 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c6gt\" (UniqueName: \"kubernetes.io/projected/011af584-1b04-4f6c-9cb1-48e1adac8d81-kube-api-access-5c6gt\") pod \"nmstate-metrics-fdff9cb8d-2lmqd\" (UID: \"011af584-1b04-4f6c-9cb1-48e1adac8d81\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307184 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s94np\" (UniqueName: \"kubernetes.io/projected/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-kube-api-access-s94np\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307202 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-ovs-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.307225 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-nmstate-lock\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.341429 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.342388 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.344641 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.345042 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-54qz9"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.351820 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"]
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.352056 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409106 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdnqs\" (UniqueName: \"kubernetes.io/projected/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-kube-api-access-hdnqs\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409169 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409215 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c6gt\" (UniqueName: \"kubernetes.io/projected/011af584-1b04-4f6c-9cb1-48e1adac8d81-kube-api-access-5c6gt\") pod \"nmstate-metrics-fdff9cb8d-2lmqd\" (UID: \"011af584-1b04-4f6c-9cb1-48e1adac8d81\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409241 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc2rq\" (UniqueName: \"kubernetes.io/projected/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-kube-api-access-kc2rq\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409272 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s94np\" (UniqueName: \"kubernetes.io/projected/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-kube-api-access-s94np\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409351 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-ovs-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409439 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-ovs-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409472 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-nmstate-lock\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409498 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409563 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-dbus-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: E1007 15:02:26.409628 4672 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409556 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-nmstate-lock\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: E1007 15:02:26.409697 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair podName:38637aa1-8ff7-4b40-b3b6-eed0f91514f6 nodeName:}" failed. No retries permitted until 2025-10-07 15:02:26.909676847 +0000 UTC m=+823.884855428 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair") pod "nmstate-webhook-6cdbc54649-v685k" (UID: "38637aa1-8ff7-4b40-b3b6-eed0f91514f6") : secret "openshift-nmstate-webhook" not found
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409865 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-dbus-socket\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.409942 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.429135 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s94np\" (UniqueName: \"kubernetes.io/projected/8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd-kube-api-access-s94np\") pod \"nmstate-handler-lbdvn\" (UID: \"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd\") " pod="openshift-nmstate/nmstate-handler-lbdvn"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.431358 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdnqs\" (UniqueName: \"kubernetes.io/projected/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-kube-api-access-hdnqs\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.431714 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c6gt\" (UniqueName: \"kubernetes.io/projected/011af584-1b04-4f6c-9cb1-48e1adac8d81-kube-api-access-5c6gt\") pod \"nmstate-metrics-fdff9cb8d-2lmqd\" (UID: \"011af584-1b04-4f6c-9cb1-48e1adac8d81\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.511617 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.511672 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"
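
The nestedpendingoperations error above is the volume manager's retry throttle: after a failed MountVolume.SetUp it refuses further attempts on that volume until now plus durationBeforeRetry (500ms here), and the window grows on repeated failures. In this log the first retry already succeeds, at 15:02:26.922 below, once the openshift-nmstate-webhook secret exists. A toy Go sketch of that doubling backoff (the 500ms start matches the entry above; the doubling factor and cap are assumptions for illustration):

package main

import (
	"fmt"
	"time"
)

// backoff mimics exponential retry spacing like the kubelet's
// nestedpendingoperations: start at 500ms, double per failure, cap.
// The 500ms start matches durationBeforeRetry above; the doubling and
// the cap are assumed illustrations, not values read from this log.
type backoff struct {
	next, cap time.Duration
}

func (b *backoff) fail(now time.Time) time.Time {
	retryAt := now.Add(b.next)
	if b.next *= 2; b.next > b.cap {
		b.next = b.cap
	}
	return retryAt
}

func main() {
	b := &backoff{next: 500 * time.Millisecond, cap: 2 * time.Minute}
	now := time.Date(2025, 10, 7, 15, 2, 26, 409676847, time.UTC)
	for i := 0; i < 4; i++ {
		retry := b.fail(now)
		fmt.Printf("failure %d: no retries permitted until %s\n", i+1, retry.Format("15:04:05.000"))
		now = retry
	}
}
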
pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.513932 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.515599 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.516499 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.536008 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc2rq\" (UniqueName: \"kubernetes.io/projected/c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e-kube-api-access-kc2rq\") pod \"nmstate-console-plugin-6b874cbd85-qpp9q\" (UID: \"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.570316 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7d4d5c677b-m5d42"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.573820 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.582045 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.589281 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7d4d5c677b-m5d42"] Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.612989 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613109 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-oauth-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwxgf\" (UniqueName: \"kubernetes.io/projected/3ad19649-d0d7-4ed0-9d29-135556f3f362-kube-api-access-wwxgf\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613184 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-oauth-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613217 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-service-ca\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613275 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-trusted-ca-bundle\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.613308 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: W1007 15:02:26.631932 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8672cc28_f5df_4e9d_8ff8_bcbff6c0b6fd.slice/crio-ca9b8040611f2ef87546e3f5b19de2eb8507ad6e861c40ce97ea293efc6ced66 WatchSource:0}: Error finding container ca9b8040611f2ef87546e3f5b19de2eb8507ad6e861c40ce97ea293efc6ced66: Status 
404 returned error can't find the container with id ca9b8040611f2ef87546e3f5b19de2eb8507ad6e861c40ce97ea293efc6ced66 Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.662688 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715163 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715236 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-oauth-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715264 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwxgf\" (UniqueName: \"kubernetes.io/projected/3ad19649-d0d7-4ed0-9d29-135556f3f362-kube-api-access-wwxgf\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715301 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-oauth-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715328 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-service-ca\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715437 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-trusted-ca-bundle\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.715469 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.716803 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.717390 4672 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-service-ca\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.717760 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-trusted-ca-bundle\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.717886 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3ad19649-d0d7-4ed0-9d29-135556f3f362-oauth-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.721103 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-oauth-config\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.722727 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ad19649-d0d7-4ed0-9d29-135556f3f362-console-serving-cert\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.735733 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwxgf\" (UniqueName: \"kubernetes.io/projected/3ad19649-d0d7-4ed0-9d29-135556f3f362-kube-api-access-wwxgf\") pod \"console-7d4d5c677b-m5d42\" (UID: \"3ad19649-d0d7-4ed0-9d29-135556f3f362\") " pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.787924 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd"] Oct 07 15:02:26 crc kubenswrapper[4672]: W1007 15:02:26.794822 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod011af584_1b04_4f6c_9cb1_48e1adac8d81.slice/crio-945e9356d052b2e7b720d1b0350f8242e0f1ff043cbab2e37ffe37ceb64065c2 WatchSource:0}: Error finding container 945e9356d052b2e7b720d1b0350f8242e0f1ff043cbab2e37ffe37ceb64065c2: Status 404 returned error can't find the container with id 945e9356d052b2e7b720d1b0350f8242e0f1ff043cbab2e37ffe37ceb64065c2 Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.908084 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.919671 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:26 crc kubenswrapper[4672]: I1007 15:02:26.922641 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/38637aa1-8ff7-4b40-b3b6-eed0f91514f6-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v685k\" (UID: \"38637aa1-8ff7-4b40-b3b6-eed0f91514f6\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.086817 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q"] Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.094006 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-lbdvn" event={"ID":"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd","Type":"ContainerStarted","Data":"ca9b8040611f2ef87546e3f5b19de2eb8507ad6e861c40ce97ea293efc6ced66"} Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.095281 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" event={"ID":"011af584-1b04-4f6c-9cb1-48e1adac8d81","Type":"ContainerStarted","Data":"945e9356d052b2e7b720d1b0350f8242e0f1ff043cbab2e37ffe37ceb64065c2"} Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.144244 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.283402 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7d4d5c677b-m5d42"] Oct 07 15:02:27 crc kubenswrapper[4672]: W1007 15:02:27.292273 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ad19649_d0d7_4ed0_9d29_135556f3f362.slice/crio-bae8aadd4a83cfed86703f1e8bddefdbca4462947cb585da584d8321c01f8930 WatchSource:0}: Error finding container bae8aadd4a83cfed86703f1e8bddefdbca4462947cb585da584d8321c01f8930: Status 404 returned error can't find the container with id bae8aadd4a83cfed86703f1e8bddefdbca4462947cb585da584d8321c01f8930 Oct 07 15:02:27 crc kubenswrapper[4672]: I1007 15:02:27.553218 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v685k"] Oct 07 15:02:28 crc kubenswrapper[4672]: I1007 15:02:28.103245 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" event={"ID":"38637aa1-8ff7-4b40-b3b6-eed0f91514f6","Type":"ContainerStarted","Data":"4e219440f9bf4a283f77ca9afba7de3aca6a8cc756ab55ebe63a24458d47c647"} Oct 07 15:02:28 crc kubenswrapper[4672]: I1007 15:02:28.104999 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7d4d5c677b-m5d42" event={"ID":"3ad19649-d0d7-4ed0-9d29-135556f3f362","Type":"ContainerStarted","Data":"c306b99b3df0c25641f60decf4fe60966ae608029e69038ea4235c704634a65d"} Oct 07 15:02:28 crc kubenswrapper[4672]: I1007 15:02:28.105136 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7d4d5c677b-m5d42" event={"ID":"3ad19649-d0d7-4ed0-9d29-135556f3f362","Type":"ContainerStarted","Data":"bae8aadd4a83cfed86703f1e8bddefdbca4462947cb585da584d8321c01f8930"} Oct 07 15:02:28 crc kubenswrapper[4672]: I1007 15:02:28.107922 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" event={"ID":"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e","Type":"ContainerStarted","Data":"51f8212e07cbd9c28d8e671c13c8813af0936d61ab8fae3f3efe11669faac7d9"} Oct 07 15:02:28 crc kubenswrapper[4672]: I1007 15:02:28.136387 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7d4d5c677b-m5d42" podStartSLOduration=2.136349715 podStartE2EDuration="2.136349715s" podCreationTimestamp="2025-10-07 15:02:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:02:28.126137394 +0000 UTC m=+825.101315985" watchObservedRunningTime="2025-10-07 15:02:28.136349715 +0000 UTC m=+825.111528296" Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.119175 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" event={"ID":"011af584-1b04-4f6c-9cb1-48e1adac8d81","Type":"ContainerStarted","Data":"da701907d6f913eb35464a5fd936edca3d4a5a03efeaf8950f6c841973d8e77d"} Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.120732 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" event={"ID":"38637aa1-8ff7-4b40-b3b6-eed0f91514f6","Type":"ContainerStarted","Data":"1535d36650e4cacec5db7df3c1374f38af47f039cd2d2fb8e780a837674834a3"} Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.120845 
4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.122605 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-lbdvn" event={"ID":"8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd","Type":"ContainerStarted","Data":"74d82047004854883904d57d6c6b36aaa83a5e686a63d5c928c813216dc458c0"} Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.122900 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.124851 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" event={"ID":"c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e","Type":"ContainerStarted","Data":"dbd4f8f6cc7d7e101a8fdec0ac883a13bebb166e7297054a0d1be72fe9191be8"} Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.134374 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" podStartSLOduration=1.959256285 podStartE2EDuration="4.134357089s" podCreationTimestamp="2025-10-07 15:02:26 +0000 UTC" firstStartedPulling="2025-10-07 15:02:27.565107046 +0000 UTC m=+824.540285627" lastFinishedPulling="2025-10-07 15:02:29.74020785 +0000 UTC m=+826.715386431" observedRunningTime="2025-10-07 15:02:30.134283977 +0000 UTC m=+827.109462558" watchObservedRunningTime="2025-10-07 15:02:30.134357089 +0000 UTC m=+827.109535670" Oct 07 15:02:30 crc kubenswrapper[4672]: I1007 15:02:30.170130 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-lbdvn" podStartSLOduration=1.062325138 podStartE2EDuration="4.170106038s" podCreationTimestamp="2025-10-07 15:02:26 +0000 UTC" firstStartedPulling="2025-10-07 15:02:26.634267492 +0000 UTC m=+823.609446073" lastFinishedPulling="2025-10-07 15:02:29.742048402 +0000 UTC m=+826.717226973" observedRunningTime="2025-10-07 15:02:30.167116833 +0000 UTC m=+827.142295414" watchObservedRunningTime="2025-10-07 15:02:30.170106038 +0000 UTC m=+827.145284619" Oct 07 15:02:32 crc kubenswrapper[4672]: I1007 15:02:32.302628 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:32 crc kubenswrapper[4672]: I1007 15:02:32.303207 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:32 crc kubenswrapper[4672]: I1007 15:02:32.348368 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:32 crc kubenswrapper[4672]: I1007 15:02:32.366512 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-qpp9q" podStartSLOduration=3.719395736 podStartE2EDuration="6.36649598s" podCreationTimestamp="2025-10-07 15:02:26 +0000 UTC" firstStartedPulling="2025-10-07 15:02:27.092173909 +0000 UTC m=+824.067352490" lastFinishedPulling="2025-10-07 15:02:29.739274153 +0000 UTC m=+826.714452734" observedRunningTime="2025-10-07 15:02:30.186386902 +0000 UTC m=+827.161565483" watchObservedRunningTime="2025-10-07 15:02:32.36649598 +0000 UTC m=+829.341674561" Oct 07 15:02:33 crc kubenswrapper[4672]: I1007 15:02:33.141510 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" event={"ID":"011af584-1b04-4f6c-9cb1-48e1adac8d81","Type":"ContainerStarted","Data":"e7634752ba143b678925ab44a55c5947a9fa4fc3a2e370f0556b2f035869528e"} Oct 07 15:02:33 crc kubenswrapper[4672]: I1007 15:02:33.160258 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-2lmqd" podStartSLOduration=1.893982373 podStartE2EDuration="7.160236473s" podCreationTimestamp="2025-10-07 15:02:26 +0000 UTC" firstStartedPulling="2025-10-07 15:02:26.798957848 +0000 UTC m=+823.774136429" lastFinishedPulling="2025-10-07 15:02:32.065211958 +0000 UTC m=+829.040390529" observedRunningTime="2025-10-07 15:02:33.155338823 +0000 UTC m=+830.130517414" watchObservedRunningTime="2025-10-07 15:02:33.160236473 +0000 UTC m=+830.135415054" Oct 07 15:02:33 crc kubenswrapper[4672]: I1007 15:02:33.183628 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:34 crc kubenswrapper[4672]: I1007 15:02:34.962976 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.150103 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xhdd7" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="registry-server" containerID="cri-o://0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de" gracePeriod=2 Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.492232 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.551576 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities\") pod \"b2110a39-1616-410c-ba78-d77bfed7e142\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.551773 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clbnc\" (UniqueName: \"kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc\") pod \"b2110a39-1616-410c-ba78-d77bfed7e142\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.551929 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content\") pod \"b2110a39-1616-410c-ba78-d77bfed7e142\" (UID: \"b2110a39-1616-410c-ba78-d77bfed7e142\") " Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.552727 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities" (OuterVolumeSpecName: "utilities") pod "b2110a39-1616-410c-ba78-d77bfed7e142" (UID: "b2110a39-1616-410c-ba78-d77bfed7e142"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.560318 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc" (OuterVolumeSpecName: "kube-api-access-clbnc") pod "b2110a39-1616-410c-ba78-d77bfed7e142" (UID: "b2110a39-1616-410c-ba78-d77bfed7e142"). InnerVolumeSpecName "kube-api-access-clbnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.564290 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2110a39-1616-410c-ba78-d77bfed7e142" (UID: "b2110a39-1616-410c-ba78-d77bfed7e142"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.653131 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.653169 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2110a39-1616-410c-ba78-d77bfed7e142-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:35 crc kubenswrapper[4672]: I1007 15:02:35.653181 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clbnc\" (UniqueName: \"kubernetes.io/projected/b2110a39-1616-410c-ba78-d77bfed7e142-kube-api-access-clbnc\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.162127 4672 generic.go:334] "Generic (PLEG): container finished" podID="b2110a39-1616-410c-ba78-d77bfed7e142" containerID="0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de" exitCode=0 Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.162197 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerDied","Data":"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de"} Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.162976 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhdd7" event={"ID":"b2110a39-1616-410c-ba78-d77bfed7e142","Type":"ContainerDied","Data":"68b2242c8fc2bf6436083aaac9a25492b0f79d729e98a31c339218248d4b328c"} Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.162228 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhdd7" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.163011 4672 scope.go:117] "RemoveContainer" containerID="0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.190475 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.191320 4672 scope.go:117] "RemoveContainer" containerID="a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.195365 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhdd7"] Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.211626 4672 scope.go:117] "RemoveContainer" containerID="0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.230860 4672 scope.go:117] "RemoveContainer" containerID="0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de" Oct 07 15:02:36 crc kubenswrapper[4672]: E1007 15:02:36.231417 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de\": container with ID starting with 0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de not found: ID does not exist" containerID="0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.231457 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de"} err="failed to get container status \"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de\": rpc error: code = NotFound desc = could not find container \"0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de\": container with ID starting with 0221fd1b3ee172bec9b556986402efbd6178ec507a55de4f8510b425dbff54de not found: ID does not exist" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.231479 4672 scope.go:117] "RemoveContainer" containerID="a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0" Oct 07 15:02:36 crc kubenswrapper[4672]: E1007 15:02:36.231981 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0\": container with ID starting with a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0 not found: ID does not exist" containerID="a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.232186 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0"} err="failed to get container status \"a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0\": rpc error: code = NotFound desc = could not find container \"a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0\": container with ID starting with a61fde84a33884d3e0502a1922b2468c375e0bbb9bbf2b2fb2363d0b64e1fab0 not found: ID does not exist" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.232207 4672 scope.go:117] "RemoveContainer" 
containerID="0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59" Oct 07 15:02:36 crc kubenswrapper[4672]: E1007 15:02:36.232556 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59\": container with ID starting with 0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59 not found: ID does not exist" containerID="0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.232574 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59"} err="failed to get container status \"0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59\": rpc error: code = NotFound desc = could not find container \"0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59\": container with ID starting with 0295dfc32f7e3eb09c8d71a4ce9ccb680cd753ef4d0e09685979fba39d829c59 not found: ID does not exist" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.608609 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-lbdvn" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.908813 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.908904 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:36 crc kubenswrapper[4672]: I1007 15:02:36.915959 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:37 crc kubenswrapper[4672]: I1007 15:02:37.174068 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7d4d5c677b-m5d42" Oct 07 15:02:37 crc kubenswrapper[4672]: I1007 15:02:37.221067 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5547l"] Oct 07 15:02:37 crc kubenswrapper[4672]: I1007 15:02:37.898361 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" path="/var/lib/kubelet/pods/b2110a39-1616-410c-ba78-d77bfed7e142/volumes" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.180277 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:43 crc kubenswrapper[4672]: E1007 15:02:43.180863 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="extract-utilities" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.180879 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="extract-utilities" Oct 07 15:02:43 crc kubenswrapper[4672]: E1007 15:02:43.180890 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="registry-server" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.180897 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="registry-server" Oct 07 15:02:43 crc kubenswrapper[4672]: E1007 15:02:43.180914 4672 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="extract-content" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.180921 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="extract-content" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.181043 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2110a39-1616-410c-ba78-d77bfed7e142" containerName="registry-server" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.181998 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.189524 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.251754 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rgxd\" (UniqueName: \"kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.251879 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.251947 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.352744 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.352817 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rgxd\" (UniqueName: \"kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.352859 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.353398 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content\") pod 
\"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.353465 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.371507 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rgxd\" (UniqueName: \"kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd\") pod \"community-operators-2t8rh\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.511501 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:43 crc kubenswrapper[4672]: I1007 15:02:43.949705 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:43 crc kubenswrapper[4672]: W1007 15:02:43.956561 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbdbb2134_d518_4a84_818c_9611df993218.slice/crio-f9c1a4020275bdb610d0eab23eb0376ac5e8e52c21f0829aa09b1d5aef3001ec WatchSource:0}: Error finding container f9c1a4020275bdb610d0eab23eb0376ac5e8e52c21f0829aa09b1d5aef3001ec: Status 404 returned error can't find the container with id f9c1a4020275bdb610d0eab23eb0376ac5e8e52c21f0829aa09b1d5aef3001ec Oct 07 15:02:44 crc kubenswrapper[4672]: I1007 15:02:44.212732 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerStarted","Data":"f9c1a4020275bdb610d0eab23eb0376ac5e8e52c21f0829aa09b1d5aef3001ec"} Oct 07 15:02:45 crc kubenswrapper[4672]: I1007 15:02:45.220322 4672 generic.go:334] "Generic (PLEG): container finished" podID="bdbb2134-d518-4a84-818c-9611df993218" containerID="dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38" exitCode=0 Oct 07 15:02:45 crc kubenswrapper[4672]: I1007 15:02:45.220372 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerDied","Data":"dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38"} Oct 07 15:02:46 crc kubenswrapper[4672]: I1007 15:02:46.227413 4672 generic.go:334] "Generic (PLEG): container finished" podID="bdbb2134-d518-4a84-818c-9611df993218" containerID="64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656" exitCode=0 Oct 07 15:02:46 crc kubenswrapper[4672]: I1007 15:02:46.227506 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerDied","Data":"64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656"} Oct 07 15:02:47 crc kubenswrapper[4672]: I1007 15:02:47.150040 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v685k" Oct 07 15:02:47 crc kubenswrapper[4672]: I1007 15:02:47.237062 
4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerStarted","Data":"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67"} Oct 07 15:02:47 crc kubenswrapper[4672]: I1007 15:02:47.257191 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2t8rh" podStartSLOduration=2.647160672 podStartE2EDuration="4.257173954s" podCreationTimestamp="2025-10-07 15:02:43 +0000 UTC" firstStartedPulling="2025-10-07 15:02:45.221781294 +0000 UTC m=+842.196959875" lastFinishedPulling="2025-10-07 15:02:46.831794586 +0000 UTC m=+843.806973157" observedRunningTime="2025-10-07 15:02:47.25076966 +0000 UTC m=+844.225948251" watchObservedRunningTime="2025-10-07 15:02:47.257173954 +0000 UTC m=+844.232352535" Oct 07 15:02:53 crc kubenswrapper[4672]: I1007 15:02:53.512005 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:53 crc kubenswrapper[4672]: I1007 15:02:53.512538 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:53 crc kubenswrapper[4672]: I1007 15:02:53.553559 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:54 crc kubenswrapper[4672]: I1007 15:02:54.311079 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.163612 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.281345 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2t8rh" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="registry-server" containerID="cri-o://5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67" gracePeriod=2 Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.662416 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.823642 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities\") pod \"bdbb2134-d518-4a84-818c-9611df993218\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.824002 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content\") pod \"bdbb2134-d518-4a84-818c-9611df993218\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.824069 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rgxd\" (UniqueName: \"kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd\") pod \"bdbb2134-d518-4a84-818c-9611df993218\" (UID: \"bdbb2134-d518-4a84-818c-9611df993218\") " Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.824692 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities" (OuterVolumeSpecName: "utilities") pod "bdbb2134-d518-4a84-818c-9611df993218" (UID: "bdbb2134-d518-4a84-818c-9611df993218"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.830403 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd" (OuterVolumeSpecName: "kube-api-access-7rgxd") pod "bdbb2134-d518-4a84-818c-9611df993218" (UID: "bdbb2134-d518-4a84-818c-9611df993218"). InnerVolumeSpecName "kube-api-access-7rgxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.925900 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:56 crc kubenswrapper[4672]: I1007 15:02:56.925935 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rgxd\" (UniqueName: \"kubernetes.io/projected/bdbb2134-d518-4a84-818c-9611df993218-kube-api-access-7rgxd\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.170550 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bdbb2134-d518-4a84-818c-9611df993218" (UID: "bdbb2134-d518-4a84-818c-9611df993218"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.228655 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdbb2134-d518-4a84-818c-9611df993218-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.288759 4672 generic.go:334] "Generic (PLEG): container finished" podID="bdbb2134-d518-4a84-818c-9611df993218" containerID="5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67" exitCode=0 Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.288800 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerDied","Data":"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67"} Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.288828 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t8rh" event={"ID":"bdbb2134-d518-4a84-818c-9611df993218","Type":"ContainerDied","Data":"f9c1a4020275bdb610d0eab23eb0376ac5e8e52c21f0829aa09b1d5aef3001ec"} Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.288843 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2t8rh" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.288853 4672 scope.go:117] "RemoveContainer" containerID="5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.308291 4672 scope.go:117] "RemoveContainer" containerID="64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.324627 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.328335 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2t8rh"] Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.347604 4672 scope.go:117] "RemoveContainer" containerID="dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.366412 4672 scope.go:117] "RemoveContainer" containerID="5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67" Oct 07 15:02:57 crc kubenswrapper[4672]: E1007 15:02:57.366787 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67\": container with ID starting with 5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67 not found: ID does not exist" containerID="5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.366829 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67"} err="failed to get container status \"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67\": rpc error: code = NotFound desc = could not find container \"5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67\": container with ID starting with 5d2fc6290d9238917e6b7bfcf0d0e7d44e1da8f6502379520b0b6db09efa7a67 not found: ID does not exist" Oct 07 
15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.366855 4672 scope.go:117] "RemoveContainer" containerID="64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656" Oct 07 15:02:57 crc kubenswrapper[4672]: E1007 15:02:57.367128 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656\": container with ID starting with 64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656 not found: ID does not exist" containerID="64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.367152 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656"} err="failed to get container status \"64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656\": rpc error: code = NotFound desc = could not find container \"64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656\": container with ID starting with 64cda73df3c3812f74db19be20becb4ba74e0f47551e2d0c9e3a7738ccdd6656 not found: ID does not exist" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.367166 4672 scope.go:117] "RemoveContainer" containerID="dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38" Oct 07 15:02:57 crc kubenswrapper[4672]: E1007 15:02:57.367538 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38\": container with ID starting with dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38 not found: ID does not exist" containerID="dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.367598 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38"} err="failed to get container status \"dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38\": rpc error: code = NotFound desc = could not find container \"dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38\": container with ID starting with dc9ef8346ab477c917977618f5caa76af6d7302ffa05b5e630265ffc6576fb38 not found: ID does not exist" Oct 07 15:02:57 crc kubenswrapper[4672]: I1007 15:02:57.898640 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdbb2134-d518-4a84-818c-9611df993218" path="/var/lib/kubelet/pods/bdbb2134-d518-4a84-818c-9611df993218/volumes" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.403903 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6"] Oct 07 15:02:59 crc kubenswrapper[4672]: E1007 15:02:59.404550 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="extract-content" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.404570 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="extract-content" Oct 07 15:02:59 crc kubenswrapper[4672]: E1007 15:02:59.404586 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="extract-utilities" Oct 07 15:02:59 crc 
kubenswrapper[4672]: I1007 15:02:59.404597 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="extract-utilities" Oct 07 15:02:59 crc kubenswrapper[4672]: E1007 15:02:59.404623 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="registry-server" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.404635 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="registry-server" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.404817 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdbb2134-d518-4a84-818c-9611df993218" containerName="registry-server" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.406152 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.408307 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.416260 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6"] Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.607642 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.607696 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77gp7\" (UniqueName: \"kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.607738 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.709001 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.709138 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle\") pod 
\"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.709163 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77gp7\" (UniqueName: \"kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.709532 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.709597 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:02:59 crc kubenswrapper[4672]: I1007 15:02:59.726432 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77gp7\" (UniqueName: \"kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:03:00 crc kubenswrapper[4672]: I1007 15:03:00.023258 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:03:00 crc kubenswrapper[4672]: I1007 15:03:00.395057 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6"] Oct 07 15:03:01 crc kubenswrapper[4672]: I1007 15:03:01.313391 4672 generic.go:334] "Generic (PLEG): container finished" podID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerID="03f729b9ed41cce66b08721c600e399ee5ed8ab90fd86a69bee198e1af332cd4" exitCode=0 Oct 07 15:03:01 crc kubenswrapper[4672]: I1007 15:03:01.313432 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" event={"ID":"00782a44-6ec2-425f-90d2-15ebc242cf3b","Type":"ContainerDied","Data":"03f729b9ed41cce66b08721c600e399ee5ed8ab90fd86a69bee198e1af332cd4"} Oct 07 15:03:01 crc kubenswrapper[4672]: I1007 15:03:01.313458 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" event={"ID":"00782a44-6ec2-425f-90d2-15ebc242cf3b","Type":"ContainerStarted","Data":"f1f95bab78cdf4af80fdf6a5b38b469796aeb412b6ed872f7577c43ae5c951ef"} Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.266652 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-5547l" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" containerID="cri-o://aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8" gracePeriod=15 Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.614580 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-5547l_84ee05d6-7689-446b-a8b6-4e186bbbec44/console/0.log" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.614838 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-5547l" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749155 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749210 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749269 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42m28\" (UniqueName: \"kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749308 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749353 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749380 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.749425 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") pod \"84ee05d6-7689-446b-a8b6-4e186bbbec44\" (UID: \"84ee05d6-7689-446b-a8b6-4e186bbbec44\") " Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.750595 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.750680 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config" (OuterVolumeSpecName: "console-config") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.750693 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca" (OuterVolumeSpecName: "service-ca") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.750819 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.755767 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.755957 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28" (OuterVolumeSpecName: "kube-api-access-42m28") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "kube-api-access-42m28". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.760236 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "84ee05d6-7689-446b-a8b6-4e186bbbec44" (UID: "84ee05d6-7689-446b-a8b6-4e186bbbec44"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850218 4672 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850253 4672 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850264 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42m28\" (UniqueName: \"kubernetes.io/projected/84ee05d6-7689-446b-a8b6-4e186bbbec44-kube-api-access-42m28\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850272 4672 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850281 4672 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850289 4672 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:02 crc kubenswrapper[4672]: I1007 15:03:02.850296 4672 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/84ee05d6-7689-446b-a8b6-4e186bbbec44-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.326639 4672 generic.go:334] "Generic (PLEG): container finished" podID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerID="aa3bc433106adfc5ee484edd748ea3aac3a27d4f35d62823f9f81fab862ccb2e" exitCode=0 Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.326717 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" event={"ID":"00782a44-6ec2-425f-90d2-15ebc242cf3b","Type":"ContainerDied","Data":"aa3bc433106adfc5ee484edd748ea3aac3a27d4f35d62823f9f81fab862ccb2e"} Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329116 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-5547l_84ee05d6-7689-446b-a8b6-4e186bbbec44/console/0.log" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329169 4672 generic.go:334] "Generic (PLEG): container finished" podID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerID="aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8" exitCode=2 Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329207 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5547l" event={"ID":"84ee05d6-7689-446b-a8b6-4e186bbbec44","Type":"ContainerDied","Data":"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8"} Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329248 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5547l" 
event={"ID":"84ee05d6-7689-446b-a8b6-4e186bbbec44","Type":"ContainerDied","Data":"abaa98dcfed8289b943b265d5fb277825db2b11d26662b549198b2b29528f62c"} Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329246 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5547l" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.329275 4672 scope.go:117] "RemoveContainer" containerID="aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.347807 4672 scope.go:117] "RemoveContainer" containerID="aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8" Oct 07 15:03:03 crc kubenswrapper[4672]: E1007 15:03:03.348182 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8\": container with ID starting with aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8 not found: ID does not exist" containerID="aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.348229 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8"} err="failed to get container status \"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8\": rpc error: code = NotFound desc = could not find container \"aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8\": container with ID starting with aa16de65106d36afc7fb8967661aee1739bd495f12f69c7df6e5354e573cb8b8 not found: ID does not exist" Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.356234 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5547l"] Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.364680 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-5547l"] Oct 07 15:03:03 crc kubenswrapper[4672]: I1007 15:03:03.905579 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" path="/var/lib/kubelet/pods/84ee05d6-7689-446b-a8b6-4e186bbbec44/volumes" Oct 07 15:03:04 crc kubenswrapper[4672]: I1007 15:03:04.337846 4672 generic.go:334] "Generic (PLEG): container finished" podID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerID="60e9a4e8830e089cacae330b1266e6fa4ba14ce3b29ca5ce76f0c119e4856aa0" exitCode=0 Oct 07 15:03:04 crc kubenswrapper[4672]: I1007 15:03:04.337882 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" event={"ID":"00782a44-6ec2-425f-90d2-15ebc242cf3b","Type":"ContainerDied","Data":"60e9a4e8830e089cacae330b1266e6fa4ba14ce3b29ca5ce76f0c119e4856aa0"} Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.569596 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.684121 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util\") pod \"00782a44-6ec2-425f-90d2-15ebc242cf3b\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.684535 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77gp7\" (UniqueName: \"kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7\") pod \"00782a44-6ec2-425f-90d2-15ebc242cf3b\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.684606 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle\") pod \"00782a44-6ec2-425f-90d2-15ebc242cf3b\" (UID: \"00782a44-6ec2-425f-90d2-15ebc242cf3b\") " Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.686222 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle" (OuterVolumeSpecName: "bundle") pod "00782a44-6ec2-425f-90d2-15ebc242cf3b" (UID: "00782a44-6ec2-425f-90d2-15ebc242cf3b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.691176 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7" (OuterVolumeSpecName: "kube-api-access-77gp7") pod "00782a44-6ec2-425f-90d2-15ebc242cf3b" (UID: "00782a44-6ec2-425f-90d2-15ebc242cf3b"). InnerVolumeSpecName "kube-api-access-77gp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.701406 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util" (OuterVolumeSpecName: "util") pod "00782a44-6ec2-425f-90d2-15ebc242cf3b" (UID: "00782a44-6ec2-425f-90d2-15ebc242cf3b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.785257 4672 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-util\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.785289 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77gp7\" (UniqueName: \"kubernetes.io/projected/00782a44-6ec2-425f-90d2-15ebc242cf3b-kube-api-access-77gp7\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:05 crc kubenswrapper[4672]: I1007 15:03:05.785299 4672 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/00782a44-6ec2-425f-90d2-15ebc242cf3b-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.349229 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" event={"ID":"00782a44-6ec2-425f-90d2-15ebc242cf3b","Type":"ContainerDied","Data":"f1f95bab78cdf4af80fdf6a5b38b469796aeb412b6ed872f7577c43ae5c951ef"} Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.349284 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1f95bab78cdf4af80fdf6a5b38b469796aeb412b6ed872f7577c43ae5c951ef" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.349307 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.372747 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:06 crc kubenswrapper[4672]: E1007 15:03:06.372978 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="pull" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.372989 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="pull" Oct 07 15:03:06 crc kubenswrapper[4672]: E1007 15:03:06.373004 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="extract" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.373010 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="extract" Oct 07 15:03:06 crc kubenswrapper[4672]: E1007 15:03:06.373040 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="util" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.373046 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="util" Oct 07 15:03:06 crc kubenswrapper[4672]: E1007 15:03:06.373059 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.373066 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.373155 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="00782a44-6ec2-425f-90d2-15ebc242cf3b" containerName="extract" Oct 07 15:03:06 crc 
kubenswrapper[4672]: I1007 15:03:06.373170 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ee05d6-7689-446b-a8b6-4e186bbbec44" containerName="console" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.373880 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.384094 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.493061 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.493117 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.493142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnvqz\" (UniqueName: \"kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.595281 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.595325 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.595355 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnvqz\" (UniqueName: \"kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.596111 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.596140 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.617991 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnvqz\" (UniqueName: \"kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz\") pod \"certified-operators-q96lf\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:06 crc kubenswrapper[4672]: I1007 15:03:06.687647 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:07 crc kubenswrapper[4672]: I1007 15:03:07.123959 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:07 crc kubenswrapper[4672]: W1007 15:03:07.131362 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32d520c0_95e3_48a3_8b64_f3151ed0eec9.slice/crio-409f7946db871a86602157c620677cf967ac4a474cb09bd247ddeca68e89a53b WatchSource:0}: Error finding container 409f7946db871a86602157c620677cf967ac4a474cb09bd247ddeca68e89a53b: Status 404 returned error can't find the container with id 409f7946db871a86602157c620677cf967ac4a474cb09bd247ddeca68e89a53b Oct 07 15:03:07 crc kubenswrapper[4672]: I1007 15:03:07.365909 4672 generic.go:334] "Generic (PLEG): container finished" podID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerID="51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63" exitCode=0 Oct 07 15:03:07 crc kubenswrapper[4672]: I1007 15:03:07.365963 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerDied","Data":"51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63"} Oct 07 15:03:07 crc kubenswrapper[4672]: I1007 15:03:07.366160 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerStarted","Data":"409f7946db871a86602157c620677cf967ac4a474cb09bd247ddeca68e89a53b"} Oct 07 15:03:08 crc kubenswrapper[4672]: I1007 15:03:08.375085 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerStarted","Data":"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f"} Oct 07 15:03:09 crc kubenswrapper[4672]: I1007 15:03:09.385751 4672 generic.go:334] "Generic (PLEG): container finished" podID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerID="76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f" exitCode=0 Oct 07 15:03:09 crc kubenswrapper[4672]: I1007 15:03:09.385806 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerDied","Data":"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f"} Oct 07 15:03:10 crc kubenswrapper[4672]: I1007 15:03:10.394616 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" 
event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerStarted","Data":"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a"} Oct 07 15:03:10 crc kubenswrapper[4672]: I1007 15:03:10.413297 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q96lf" podStartSLOduration=1.9969908219999999 podStartE2EDuration="4.413275221s" podCreationTimestamp="2025-10-07 15:03:06 +0000 UTC" firstStartedPulling="2025-10-07 15:03:07.367823172 +0000 UTC m=+864.343001753" lastFinishedPulling="2025-10-07 15:03:09.784107571 +0000 UTC m=+866.759286152" observedRunningTime="2025-10-07 15:03:10.411728307 +0000 UTC m=+867.386906888" watchObservedRunningTime="2025-10-07 15:03:10.413275221 +0000 UTC m=+867.388453802" Oct 07 15:03:16 crc kubenswrapper[4672]: I1007 15:03:16.688573 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:16 crc kubenswrapper[4672]: I1007 15:03:16.689188 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:16 crc kubenswrapper[4672]: I1007 15:03:16.731317 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:17 crc kubenswrapper[4672]: I1007 15:03:17.469957 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.365889 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f"] Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.367048 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.368795 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-lnjf8" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.369367 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.369412 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.369502 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.375840 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.390141 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f"] Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.536902 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-webhook-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.536971 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdpg7\" (UniqueName: \"kubernetes.io/projected/257d8fce-653e-49bf-ba47-f5d0e156298d-kube-api-access-cdpg7\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.537178 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-apiservice-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.590674 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w"] Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.591553 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.592851 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-6jhfz" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.593043 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.593454 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.638112 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-webhook-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.638373 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdpg7\" (UniqueName: \"kubernetes.io/projected/257d8fce-653e-49bf-ba47-f5d0e156298d-kube-api-access-cdpg7\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.638448 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-apiservice-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.644110 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-apiservice-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.645820 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/257d8fce-653e-49bf-ba47-f5d0e156298d-webhook-cert\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.649096 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w"] Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.654954 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdpg7\" (UniqueName: \"kubernetes.io/projected/257d8fce-653e-49bf-ba47-f5d0e156298d-kube-api-access-cdpg7\") pod \"metallb-operator-controller-manager-79645d9fd4-bzw8f\" (UID: \"257d8fce-653e-49bf-ba47-f5d0e156298d\") " pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.689202 4672 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.739713 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-apiservice-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.740125 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-webhook-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.740322 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmc96\" (UniqueName: \"kubernetes.io/projected/82da041e-ab89-4015-baa7-491b55cc00ba-kube-api-access-dmc96\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.841244 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-webhook-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.841321 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmc96\" (UniqueName: \"kubernetes.io/projected/82da041e-ab89-4015-baa7-491b55cc00ba-kube-api-access-dmc96\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.841441 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-apiservice-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.851751 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-webhook-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.853760 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/82da041e-ab89-4015-baa7-491b55cc00ba-apiservice-cert\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " 
pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.864339 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmc96\" (UniqueName: \"kubernetes.io/projected/82da041e-ab89-4015-baa7-491b55cc00ba-kube-api-access-dmc96\") pod \"metallb-operator-webhook-server-9f889fdd4-rjh7w\" (UID: \"82da041e-ab89-4015-baa7-491b55cc00ba\") " pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.900209 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f"] Oct 07 15:03:18 crc kubenswrapper[4672]: W1007 15:03:18.905194 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod257d8fce_653e_49bf_ba47_f5d0e156298d.slice/crio-b0d2ad5f49181a634039d711c2096a090f83b321be92476d2a01060eb437735c WatchSource:0}: Error finding container b0d2ad5f49181a634039d711c2096a090f83b321be92476d2a01060eb437735c: Status 404 returned error can't find the container with id b0d2ad5f49181a634039d711c2096a090f83b321be92476d2a01060eb437735c Oct 07 15:03:18 crc kubenswrapper[4672]: I1007 15:03:18.905250 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:19 crc kubenswrapper[4672]: I1007 15:03:19.105521 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w"] Oct 07 15:03:19 crc kubenswrapper[4672]: W1007 15:03:19.115456 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82da041e_ab89_4015_baa7_491b55cc00ba.slice/crio-56307ec4a263ca01d537283455df7135275b86c968765789e873a89613f472a5 WatchSource:0}: Error finding container 56307ec4a263ca01d537283455df7135275b86c968765789e873a89613f472a5: Status 404 returned error can't find the container with id 56307ec4a263ca01d537283455df7135275b86c968765789e873a89613f472a5 Oct 07 15:03:19 crc kubenswrapper[4672]: I1007 15:03:19.439747 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" event={"ID":"257d8fce-653e-49bf-ba47-f5d0e156298d","Type":"ContainerStarted","Data":"b0d2ad5f49181a634039d711c2096a090f83b321be92476d2a01060eb437735c"} Oct 07 15:03:19 crc kubenswrapper[4672]: I1007 15:03:19.440771 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" event={"ID":"82da041e-ab89-4015-baa7-491b55cc00ba","Type":"ContainerStarted","Data":"56307ec4a263ca01d537283455df7135275b86c968765789e873a89613f472a5"} Oct 07 15:03:24 crc kubenswrapper[4672]: I1007 15:03:24.762766 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:24 crc kubenswrapper[4672]: I1007 15:03:24.763516 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q96lf" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="registry-server" containerID="cri-o://9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a" gracePeriod=2 Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.135269 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.330996 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities\") pod \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.331085 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnvqz\" (UniqueName: \"kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz\") pod \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.331217 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content\") pod \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\" (UID: \"32d520c0-95e3-48a3-8b64-f3151ed0eec9\") " Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.332097 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities" (OuterVolumeSpecName: "utilities") pod "32d520c0-95e3-48a3-8b64-f3151ed0eec9" (UID: "32d520c0-95e3-48a3-8b64-f3151ed0eec9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.336305 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz" (OuterVolumeSpecName: "kube-api-access-nnvqz") pod "32d520c0-95e3-48a3-8b64-f3151ed0eec9" (UID: "32d520c0-95e3-48a3-8b64-f3151ed0eec9"). InnerVolumeSpecName "kube-api-access-nnvqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.383228 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32d520c0-95e3-48a3-8b64-f3151ed0eec9" (UID: "32d520c0-95e3-48a3-8b64-f3151ed0eec9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.432929 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.432970 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnvqz\" (UniqueName: \"kubernetes.io/projected/32d520c0-95e3-48a3-8b64-f3151ed0eec9-kube-api-access-nnvqz\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.432981 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d520c0-95e3-48a3-8b64-f3151ed0eec9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.477703 4672 generic.go:334] "Generic (PLEG): container finished" podID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerID="9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a" exitCode=0 Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.477776 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q96lf" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.477796 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerDied","Data":"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a"} Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.477858 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q96lf" event={"ID":"32d520c0-95e3-48a3-8b64-f3151ed0eec9","Type":"ContainerDied","Data":"409f7946db871a86602157c620677cf967ac4a474cb09bd247ddeca68e89a53b"} Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.477878 4672 scope.go:117] "RemoveContainer" containerID="9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.479872 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" event={"ID":"257d8fce-653e-49bf-ba47-f5d0e156298d","Type":"ContainerStarted","Data":"348f05303624acb42fa358f3f1f26ef27c48756b2695255ee8b5fd1083444707"} Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.480036 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.481316 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" event={"ID":"82da041e-ab89-4015-baa7-491b55cc00ba","Type":"ContainerStarted","Data":"2a6f8c5c2808dbb3afa1f1c45b0753ba148b3f4f6d8e01aa07c67aa0d1f55409"} Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.481477 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.496248 4672 scope.go:117] "RemoveContainer" containerID="76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.506560 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" podStartSLOduration=1.896157265 podStartE2EDuration="7.506538954s" podCreationTimestamp="2025-10-07 15:03:18 +0000 UTC" firstStartedPulling="2025-10-07 15:03:18.907326874 +0000 UTC m=+875.882505455" lastFinishedPulling="2025-10-07 15:03:24.517708563 +0000 UTC m=+881.492887144" observedRunningTime="2025-10-07 15:03:25.504768713 +0000 UTC m=+882.479947294" watchObservedRunningTime="2025-10-07 15:03:25.506538954 +0000 UTC m=+882.481717525" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.519876 4672 scope.go:117] "RemoveContainer" containerID="51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.540532 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" podStartSLOduration=2.126236713 podStartE2EDuration="7.540511029s" podCreationTimestamp="2025-10-07 15:03:18 +0000 UTC" firstStartedPulling="2025-10-07 15:03:19.118986623 +0000 UTC m=+876.094165194" lastFinishedPulling="2025-10-07 15:03:24.533260929 +0000 UTC m=+881.508439510" observedRunningTime="2025-10-07 15:03:25.533123977 +0000 UTC m=+882.508302548" watchObservedRunningTime="2025-10-07 15:03:25.540511029 +0000 UTC m=+882.515689610" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.543436 4672 scope.go:117] "RemoveContainer" containerID="9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a" Oct 07 15:03:25 crc kubenswrapper[4672]: E1007 15:03:25.544492 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a\": container with ID starting with 9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a not found: ID does not exist" containerID="9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.544539 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a"} err="failed to get container status \"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a\": rpc error: code = NotFound desc = could not find container \"9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a\": container with ID starting with 9e076585b9642fc392ba491145630ada9cb807637eda142fbbd268e0803ab79a not found: ID does not exist" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.544571 4672 scope.go:117] "RemoveContainer" containerID="76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f" Oct 07 15:03:25 crc kubenswrapper[4672]: E1007 15:03:25.548463 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f\": container with ID starting with 76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f not found: ID does not exist" containerID="76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.548517 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f"} err="failed to get container status 
\"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f\": rpc error: code = NotFound desc = could not find container \"76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f\": container with ID starting with 76c40674c3407cdd72a645bd9587c46b7c3bf2c5facf63eeb0f7616ad9bdee2f not found: ID does not exist" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.548550 4672 scope.go:117] "RemoveContainer" containerID="51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63" Oct 07 15:03:25 crc kubenswrapper[4672]: E1007 15:03:25.548905 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63\": container with ID starting with 51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63 not found: ID does not exist" containerID="51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.548952 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63"} err="failed to get container status \"51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63\": rpc error: code = NotFound desc = could not find container \"51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63\": container with ID starting with 51bbebf79865d99217271580ac6264e168a4e4c99bc396c790eeacb75d9c7e63 not found: ID does not exist" Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.556974 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.559054 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q96lf"] Oct 07 15:03:25 crc kubenswrapper[4672]: I1007 15:03:25.903111 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" path="/var/lib/kubelet/pods/32d520c0-95e3-48a3-8b64-f3151ed0eec9/volumes" Oct 07 15:03:38 crc kubenswrapper[4672]: I1007 15:03:38.910642 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-9f889fdd4-rjh7w" Oct 07 15:03:56 crc kubenswrapper[4672]: I1007 15:03:56.650894 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:03:56 crc kubenswrapper[4672]: I1007 15:03:56.651445 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:03:58 crc kubenswrapper[4672]: I1007 15:03:58.691837 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-79645d9fd4-bzw8f" Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.419695 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"] Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.420175 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="extract-utilities"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.420253 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="extract-utilities"
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.420324 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="registry-server"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.420384 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="registry-server"
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.420441 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="extract-content"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.420492 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="extract-content"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.420637 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d520c0-95e3-48a3-8b64-f3151ed0eec9" containerName="registry-server"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.421098 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.423712 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-xrmb7"]
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.425989 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.426333 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-dkfzx"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.426736 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.427406 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.428076 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.474535 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"]
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.495466 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-dnlzl"]
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.496422 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.500179 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.500304 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.500587 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.501057 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-cjjhk"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513290 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-cert\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513354 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/977f6996-2c6a-4749-a101-914929de6749-frr-startup\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513380 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-reloader\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513547 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdzn2\" (UniqueName: \"kubernetes.io/projected/977f6996-2c6a-4749-a101-914929de6749-kube-api-access-jdzn2\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513639 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvdsc\" (UniqueName: \"kubernetes.io/projected/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-kube-api-access-hvdsc\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513710 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-sockets\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513758 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-conf\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513782 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.513897 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-metrics\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.519125 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-2qk76"]
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.520315 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.526072 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.535561 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-2qk76"]
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.614993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/977f6996-2c6a-4749-a101-914929de6749-frr-startup\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615049 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-reloader\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615070 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metallb-excludel2\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615097 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdzn2\" (UniqueName: \"kubernetes.io/projected/977f6996-2c6a-4749-a101-914929de6749-kube-api-access-jdzn2\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615116 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-cert\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615133 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prnxp\" (UniqueName: \"kubernetes.io/projected/11d7aebd-bbc3-49e3-b5ac-53377112f97f-kube-api-access-prnxp\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615157 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvdsc\" (UniqueName: \"kubernetes.io/projected/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-kube-api-access-hvdsc\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615176 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-sockets\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615201 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-conf\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615217 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615250 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbn84\" (UniqueName: \"kubernetes.io/projected/cfac07f1-5471-4d32-9064-01b6f600c640-kube-api-access-qbn84\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615272 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-metrics-certs\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615326 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615347 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615369 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-metrics\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.615388 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-cert\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.616347 4672 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.616441 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs podName:977f6996-2c6a-4749-a101-914929de6749 nodeName:}" failed. No retries permitted until 2025-10-07 15:04:00.116420811 +0000 UTC m=+917.091599392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs") pod "frr-k8s-xrmb7" (UID: "977f6996-2c6a-4749-a101-914929de6749") : secret "frr-k8s-certs-secret" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.616645 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-sockets\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.617043 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/977f6996-2c6a-4749-a101-914929de6749-frr-startup\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.618278 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-metrics\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.618477 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-frr-conf\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.618493 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/977f6996-2c6a-4749-a101-914929de6749-reloader\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.621816 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-cert\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.642902 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvdsc\" (UniqueName: \"kubernetes.io/projected/6b9adeec-cd4d-4b83-8dd6-124c90eaa801-kube-api-access-hvdsc\") pod \"frr-k8s-webhook-server-64bf5d555-zb5t6\" (UID: \"6b9adeec-cd4d-4b83-8dd6-124c90eaa801\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.655729 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdzn2\" (UniqueName: \"kubernetes.io/projected/977f6996-2c6a-4749-a101-914929de6749-kube-api-access-jdzn2\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717102 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbn84\" (UniqueName: \"kubernetes.io/projected/cfac07f1-5471-4d32-9064-01b6f600c640-kube-api-access-qbn84\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717156 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-metrics-certs\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717203 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717254 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metallb-excludel2\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717306 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-cert\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.717329 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prnxp\" (UniqueName: \"kubernetes.io/projected/11d7aebd-bbc3-49e3-b5ac-53377112f97f-kube-api-access-prnxp\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.718191 4672 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.718264 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist podName:11d7aebd-bbc3-49e3-b5ac-53377112f97f nodeName:}" failed. No retries permitted until 2025-10-07 15:04:00.218247535 +0000 UTC m=+917.193426116 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist") pod "speaker-dnlzl" (UID: "11d7aebd-bbc3-49e3-b5ac-53377112f97f") : secret "metallb-memberlist" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.718679 4672 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: E1007 15:03:59.718822 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs podName:11d7aebd-bbc3-49e3-b5ac-53377112f97f nodeName:}" failed. No retries permitted until 2025-10-07 15:04:00.218800161 +0000 UTC m=+917.193978862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs") pod "speaker-dnlzl" (UID: "11d7aebd-bbc3-49e3-b5ac-53377112f97f") : secret "speaker-certs-secret" not found
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.719425 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metallb-excludel2\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.720584 4672 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.722370 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-metrics-certs\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.738222 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac07f1-5471-4d32-9064-01b6f600c640-cert\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.739899 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbn84\" (UniqueName: \"kubernetes.io/projected/cfac07f1-5471-4d32-9064-01b6f600c640-kube-api-access-qbn84\") pod \"controller-68d546b9d8-2qk76\" (UID: \"cfac07f1-5471-4d32-9064-01b6f600c640\") " pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.740251 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.742488 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prnxp\" (UniqueName: \"kubernetes.io/projected/11d7aebd-bbc3-49e3-b5ac-53377112f97f-kube-api-access-prnxp\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:03:59 crc kubenswrapper[4672]: I1007 15:03:59.833985 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.005255 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-2qk76"]
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.129822 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.138485 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/977f6996-2c6a-4749-a101-914929de6749-metrics-certs\") pod \"frr-k8s-xrmb7\" (UID: \"977f6996-2c6a-4749-a101-914929de6749\") " pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.144512 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"]
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.231048 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.231105 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:00 crc kubenswrapper[4672]: E1007 15:04:00.231205 4672 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 07 15:04:00 crc kubenswrapper[4672]: E1007 15:04:00.231263 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist podName:11d7aebd-bbc3-49e3-b5ac-53377112f97f nodeName:}" failed. No retries permitted until 2025-10-07 15:04:01.23124646 +0000 UTC m=+918.206425051 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist") pod "speaker-dnlzl" (UID: "11d7aebd-bbc3-49e3-b5ac-53377112f97f") : secret "metallb-memberlist" not found
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.234810 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-metrics-certs\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.349448 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xrmb7"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.669229 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"8e712147e0fb90aacd8188cde15a09cf8ca45e75174f3fe54db16fdddf492794"}
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.671451 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-2qk76" event={"ID":"cfac07f1-5471-4d32-9064-01b6f600c640","Type":"ContainerStarted","Data":"98f7bab9397e27d3fb52db329d5cc55edbd6c523ec1c31cc26ff19808966dd39"}
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.671479 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-2qk76" event={"ID":"cfac07f1-5471-4d32-9064-01b6f600c640","Type":"ContainerStarted","Data":"5db878ad8bdfd76756d4b71f7074bc44f80b0a33458512d6fc44164520908da5"}
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.671491 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-2qk76" event={"ID":"cfac07f1-5471-4d32-9064-01b6f600c640","Type":"ContainerStarted","Data":"eedf0c68742ed5f4bd43230ae57bf9e10c74f2b76d3c423b97b7af07f150ed42"}
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.671610 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-2qk76"
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.672917 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6" event={"ID":"6b9adeec-cd4d-4b83-8dd6-124c90eaa801","Type":"ContainerStarted","Data":"12f1d5133b006c2339c19eb27546a8688e611dd91eb760640f6dc8cd76998713"}
Oct 07 15:04:00 crc kubenswrapper[4672]: I1007 15:04:00.691571 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-2qk76" podStartSLOduration=1.69155169 podStartE2EDuration="1.69155169s" podCreationTimestamp="2025-10-07 15:03:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:04:00.688353758 +0000 UTC m=+917.663532349" watchObservedRunningTime="2025-10-07 15:04:00.69155169 +0000 UTC m=+917.666730271"
Oct 07 15:04:01 crc kubenswrapper[4672]: I1007 15:04:01.247148 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:01 crc kubenswrapper[4672]: I1007 15:04:01.251191 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/11d7aebd-bbc3-49e3-b5ac-53377112f97f-memberlist\") pod \"speaker-dnlzl\" (UID: \"11d7aebd-bbc3-49e3-b5ac-53377112f97f\") " pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:01 crc kubenswrapper[4672]: I1007 15:04:01.310087 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:01 crc kubenswrapper[4672]: W1007 15:04:01.332735 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11d7aebd_bbc3_49e3_b5ac_53377112f97f.slice/crio-331a547a9e2a4ccca21f5f2bc3cc1fb460d5c6a3626b146052d8593fa7388df0 WatchSource:0}: Error finding container 331a547a9e2a4ccca21f5f2bc3cc1fb460d5c6a3626b146052d8593fa7388df0: Status 404 returned error can't find the container with id 331a547a9e2a4ccca21f5f2bc3cc1fb460d5c6a3626b146052d8593fa7388df0
Oct 07 15:04:01 crc kubenswrapper[4672]: I1007 15:04:01.683278 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dnlzl" event={"ID":"11d7aebd-bbc3-49e3-b5ac-53377112f97f","Type":"ContainerStarted","Data":"3a56567dd8a044c59b8b7b766f1723363738e99a2aa5bd4560781ce7b54b0efb"}
Oct 07 15:04:01 crc kubenswrapper[4672]: I1007 15:04:01.683596 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dnlzl" event={"ID":"11d7aebd-bbc3-49e3-b5ac-53377112f97f","Type":"ContainerStarted","Data":"331a547a9e2a4ccca21f5f2bc3cc1fb460d5c6a3626b146052d8593fa7388df0"}
Oct 07 15:04:02 crc kubenswrapper[4672]: I1007 15:04:02.698630 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dnlzl" event={"ID":"11d7aebd-bbc3-49e3-b5ac-53377112f97f","Type":"ContainerStarted","Data":"94065c93d4c3da1fd94d0bbba09f7a3ba5b334c81c796b8c920b349fff927bbc"}
Oct 07 15:04:02 crc kubenswrapper[4672]: I1007 15:04:02.698769 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-dnlzl"
Oct 07 15:04:02 crc kubenswrapper[4672]: I1007 15:04:02.714193 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-dnlzl" podStartSLOduration=3.7141758830000002 podStartE2EDuration="3.714175883s" podCreationTimestamp="2025-10-07 15:03:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:04:02.71373048 +0000 UTC m=+919.688909071" watchObservedRunningTime="2025-10-07 15:04:02.714175883 +0000 UTC m=+919.689354464"
Oct 07 15:04:07 crc kubenswrapper[4672]: I1007 15:04:07.739147 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6" event={"ID":"6b9adeec-cd4d-4b83-8dd6-124c90eaa801","Type":"ContainerStarted","Data":"d0db39ccb3271596b944c65b927a451c0d51691bae85bb814a4bd1a191a1b03d"}
Oct 07 15:04:07 crc kubenswrapper[4672]: I1007 15:04:07.739822 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6"
Oct 07 15:04:07 crc kubenswrapper[4672]: I1007 15:04:07.740747 4672 generic.go:334] "Generic (PLEG): container finished" podID="977f6996-2c6a-4749-a101-914929de6749" containerID="71abc722fb99f94f1537cb0e157fcc1d7f5f6e88194123a6802d47e200f108e4" exitCode=0
Oct 07 15:04:07 crc kubenswrapper[4672]: I1007 15:04:07.740799 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerDied","Data":"71abc722fb99f94f1537cb0e157fcc1d7f5f6e88194123a6802d47e200f108e4"}
Oct 07 15:04:07 crc kubenswrapper[4672]: I1007 15:04:07.763473 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6" podStartSLOduration=1.827461053 podStartE2EDuration="8.763458016s" podCreationTimestamp="2025-10-07 15:03:59 +0000 UTC" firstStartedPulling="2025-10-07 15:04:00.148160923 +0000 UTC m=+917.123339504" lastFinishedPulling="2025-10-07 15:04:07.084157886 +0000 UTC m=+924.059336467" observedRunningTime="2025-10-07 15:04:07.758150284 +0000 UTC m=+924.733328885" watchObservedRunningTime="2025-10-07 15:04:07.763458016 +0000 UTC m=+924.738636597"
Oct 07 15:04:08 crc kubenswrapper[4672]: I1007 15:04:08.748837 4672 generic.go:334] "Generic (PLEG): container finished" podID="977f6996-2c6a-4749-a101-914929de6749" containerID="5a129d174b521ddacee1ac9e8d9082b028e9c411fb9830aec555bbaa2252d97f" exitCode=0
Oct 07 15:04:08 crc kubenswrapper[4672]: I1007 15:04:08.748914 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerDied","Data":"5a129d174b521ddacee1ac9e8d9082b028e9c411fb9830aec555bbaa2252d97f"}
Oct 07 15:04:09 crc kubenswrapper[4672]: I1007 15:04:09.757122 4672 generic.go:334] "Generic (PLEG): container finished" podID="977f6996-2c6a-4749-a101-914929de6749" containerID="d59525be5e0daa346ff8f3d3648848c664b188aace372bdabf7742ab06189224" exitCode=0
Oct 07 15:04:09 crc kubenswrapper[4672]: I1007 15:04:09.757165 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerDied","Data":"d59525be5e0daa346ff8f3d3648848c664b188aace372bdabf7742ab06189224"}
Oct 07 15:04:10 crc kubenswrapper[4672]: I1007 15:04:10.772448 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"cfe3e9a8d342cfd5a73c9262eb4c8c1dff450f769988c0c628ab3ff06aa82b38"}
Oct 07 15:04:10 crc kubenswrapper[4672]: I1007 15:04:10.772902 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"d4ae9b972f061fa038ede479841eb0638ccfc374544e3bbeee042c14b44a35f2"}
Oct 07 15:04:10 crc kubenswrapper[4672]: I1007 15:04:10.772916 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"201e8a6a26365e8108011d1e169ee9207bc45029fc7d7b36810e4ebeae44b36c"}
Oct 07 15:04:10 crc kubenswrapper[4672]: I1007 15:04:10.772928 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"7cb01790cb2eb91800cfe1361e3ac1af7c45ccaca10ad6b05e4aae1cec7d9764"}
Oct 07 15:04:10 crc kubenswrapper[4672]: I1007 15:04:10.772938 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"c7f2510e6daa668139cd4e1ed0b6460dc50a4ec7ad453b89248ac4d22cdb5e95"}
"SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-dnlzl" Oct 07 15:04:11 crc kubenswrapper[4672]: I1007 15:04:11.780723 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrmb7" event={"ID":"977f6996-2c6a-4749-a101-914929de6749","Type":"ContainerStarted","Data":"8f71807d232b7aa429e9245e748060a3de827cc77f6cc4c26d8ab12b42e0c134"} Oct 07 15:04:11 crc kubenswrapper[4672]: I1007 15:04:11.780887 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-xrmb7" Oct 07 15:04:11 crc kubenswrapper[4672]: I1007 15:04:11.800663 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-xrmb7" podStartSLOduration=6.137555186 podStartE2EDuration="12.80064058s" podCreationTimestamp="2025-10-07 15:03:59 +0000 UTC" firstStartedPulling="2025-10-07 15:04:00.440126429 +0000 UTC m=+917.415305010" lastFinishedPulling="2025-10-07 15:04:07.103211823 +0000 UTC m=+924.078390404" observedRunningTime="2025-10-07 15:04:11.797862991 +0000 UTC m=+928.773041592" watchObservedRunningTime="2025-10-07 15:04:11.80064058 +0000 UTC m=+928.775819151" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.340426 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.342495 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.345878 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-bntbz" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.345961 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.346201 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.362579 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.452853 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2686\" (UniqueName: \"kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686\") pod \"openstack-operator-index-r44zg\" (UID: \"e6e03e23-3ab0-42cd-8b45-ab5f16516750\") " pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.553548 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2686\" (UniqueName: \"kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686\") pod \"openstack-operator-index-r44zg\" (UID: \"e6e03e23-3ab0-42cd-8b45-ab5f16516750\") " pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:14 crc kubenswrapper[4672]: I1007 15:04:14.573575 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2686\" (UniqueName: \"kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686\") pod \"openstack-operator-index-r44zg\" (UID: \"e6e03e23-3ab0-42cd-8b45-ab5f16516750\") " pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:14 crc 
kubenswrapper[4672]: I1007 15:04:14.662542 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:15 crc kubenswrapper[4672]: I1007 15:04:15.033608 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:15 crc kubenswrapper[4672]: W1007 15:04:15.040217 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6e03e23_3ab0_42cd_8b45_ab5f16516750.slice/crio-47936028c66dd0a1c487ff68da75da5dbeaa809bb0f97ef898eed31bf3fac13d WatchSource:0}: Error finding container 47936028c66dd0a1c487ff68da75da5dbeaa809bb0f97ef898eed31bf3fac13d: Status 404 returned error can't find the container with id 47936028c66dd0a1c487ff68da75da5dbeaa809bb0f97ef898eed31bf3fac13d Oct 07 15:04:15 crc kubenswrapper[4672]: I1007 15:04:15.350345 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-xrmb7" Oct 07 15:04:15 crc kubenswrapper[4672]: I1007 15:04:15.400411 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-xrmb7" Oct 07 15:04:15 crc kubenswrapper[4672]: I1007 15:04:15.816043 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r44zg" event={"ID":"e6e03e23-3ab0-42cd-8b45-ab5f16516750","Type":"ContainerStarted","Data":"47936028c66dd0a1c487ff68da75da5dbeaa809bb0f97ef898eed31bf3fac13d"} Oct 07 15:04:17 crc kubenswrapper[4672]: I1007 15:04:17.718446 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.327966 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6zf4v"] Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.329821 4672 util.go:30] "No sandbox for pod can be found. 
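
The "SyncLoop (probe)" entries record probe-state transitions as the kubelet observes them: frr-k8s-xrmb7's startup probe reports unhealthy at 15:04:15.350 and flips to started 50ms later, with readiness following at 15:04:20 (below), while machine-config-daemon's liveness probe keeps failing with connection refused (15:03:56 above, 15:04:26 below). A sketch extracting the transitions (stdin assumption as before):

    import re, sys

    PROBE = re.compile(r'"SyncLoop \(probe\)" probe="(\w+)" '
                       r'status="(\w*)" pod="([^"]+)"')

    for line in sys.stdin:
        if m := PROBE.search(line):
            probe, status, pod = m.groups()
            # An empty status means the kubelet cleared the previous result.
            print(f"{pod}: {probe} -> {status or '(cleared)'}")
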
Need to start a new one" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.333660 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6zf4v"] Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.503401 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk2fj\" (UniqueName: \"kubernetes.io/projected/34e36355-636c-455e-a493-0ff6fe705d28-kube-api-access-xk2fj\") pod \"openstack-operator-index-6zf4v\" (UID: \"34e36355-636c-455e-a493-0ff6fe705d28\") " pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.604672 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk2fj\" (UniqueName: \"kubernetes.io/projected/34e36355-636c-455e-a493-0ff6fe705d28-kube-api-access-xk2fj\") pod \"openstack-operator-index-6zf4v\" (UID: \"34e36355-636c-455e-a493-0ff6fe705d28\") " pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.622762 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk2fj\" (UniqueName: \"kubernetes.io/projected/34e36355-636c-455e-a493-0ff6fe705d28-kube-api-access-xk2fj\") pod \"openstack-operator-index-6zf4v\" (UID: \"34e36355-636c-455e-a493-0ff6fe705d28\") " pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:18 crc kubenswrapper[4672]: I1007 15:04:18.662061 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:19 crc kubenswrapper[4672]: I1007 15:04:19.745160 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-zb5t6" Oct 07 15:04:19 crc kubenswrapper[4672]: I1007 15:04:19.838450 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-2qk76" Oct 07 15:04:20 crc kubenswrapper[4672]: I1007 15:04:20.355087 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-xrmb7" Oct 07 15:04:26 crc kubenswrapper[4672]: I1007 15:04:26.650223 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:04:26 crc kubenswrapper[4672]: I1007 15:04:26.650990 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:04:30 crc kubenswrapper[4672]: I1007 15:04:30.777977 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6zf4v"] Oct 07 15:04:30 crc kubenswrapper[4672]: W1007 15:04:30.883910 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34e36355_636c_455e_a493_0ff6fe705d28.slice/crio-3ae7b3e475352d2aab6da7461f58eba37c90403d0b8e8a96a043e7ed9026dcdf WatchSource:0}: Error finding container 
3ae7b3e475352d2aab6da7461f58eba37c90403d0b8e8a96a043e7ed9026dcdf: Status 404 returned error can't find the container with id 3ae7b3e475352d2aab6da7461f58eba37c90403d0b8e8a96a043e7ed9026dcdf Oct 07 15:04:30 crc kubenswrapper[4672]: I1007 15:04:30.902141 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zf4v" event={"ID":"34e36355-636c-455e-a493-0ff6fe705d28","Type":"ContainerStarted","Data":"3ae7b3e475352d2aab6da7461f58eba37c90403d0b8e8a96a043e7ed9026dcdf"} Oct 07 15:04:32 crc kubenswrapper[4672]: E1007 15:04:32.162991 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/openstack-k8s-operators/openstack-operator-index:b9af9b071c68d03a6ce8d8386899abae205acbbc" Oct 07 15:04:32 crc kubenswrapper[4672]: E1007 15:04:32.163263 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/openstack-k8s-operators/openstack-operator-index:b9af9b071c68d03a6ce8d8386899abae205acbbc" Oct 07 15:04:32 crc kubenswrapper[4672]: E1007 15:04:32.163388 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.103:5001/openstack-k8s-operators/openstack-operator-index:b9af9b071c68d03a6ce8d8386899abae205acbbc,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t2686,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-r44zg_openstack-operators(e6e03e23-3ab0-42cd-8b45-ab5f16516750): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" 
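
The ErrImagePull above reads as a cancellation rather than a registry failure: openstack-operator-index-r44zg was deleted at 15:04:17 (SyncLoop DELETE, above) while its registry-server image was still being pulled, so the pull aborts with "context canceled" and kuberuntime_manager dumps the whole container spec under "Unhandled Error" before the pod is torn down below. A sketch surfacing pull failures with their images (stdin assumption as before):

    import re, sys

    PULL = re.compile(r'"Failed to pull image" err="(.*?)" image="([^"]+)"')

    for line in sys.stdin:
        if m := PULL.search(line):
            err, image = m.groups()
            print(f"{image}\n  {err}")
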
Oct 07 15:04:32 crc kubenswrapper[4672]: E1007 15:04:32.165159 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-operator-index-r44zg" podUID="e6e03e23-3ab0-42cd-8b45-ab5f16516750" Oct 07 15:04:32 crc kubenswrapper[4672]: I1007 15:04:32.916636 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zf4v" event={"ID":"34e36355-636c-455e-a493-0ff6fe705d28","Type":"ContainerStarted","Data":"e7f3a1244e959003c79dcf2f1cdd7da0989169948dc09c34e70ee6adf90311a4"} Oct 07 15:04:32 crc kubenswrapper[4672]: I1007 15:04:32.948579 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6zf4v" podStartSLOduration=13.64084185 podStartE2EDuration="14.948563509s" podCreationTimestamp="2025-10-07 15:04:18 +0000 UTC" firstStartedPulling="2025-10-07 15:04:30.885992709 +0000 UTC m=+947.861171290" lastFinishedPulling="2025-10-07 15:04:32.193714368 +0000 UTC m=+949.168892949" observedRunningTime="2025-10-07 15:04:32.945004557 +0000 UTC m=+949.920183138" watchObservedRunningTime="2025-10-07 15:04:32.948563509 +0000 UTC m=+949.923742090" Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.138026 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.199413 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2686\" (UniqueName: \"kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686\") pod \"e6e03e23-3ab0-42cd-8b45-ab5f16516750\" (UID: \"e6e03e23-3ab0-42cd-8b45-ab5f16516750\") " Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.207317 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686" (OuterVolumeSpecName: "kube-api-access-t2686") pod "e6e03e23-3ab0-42cd-8b45-ab5f16516750" (UID: "e6e03e23-3ab0-42cd-8b45-ab5f16516750"). InnerVolumeSpecName "kube-api-access-t2686". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.301162 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2686\" (UniqueName: \"kubernetes.io/projected/e6e03e23-3ab0-42cd-8b45-ab5f16516750-kube-api-access-t2686\") on node \"crc\" DevicePath \"\"" Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.923151 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r44zg" event={"ID":"e6e03e23-3ab0-42cd-8b45-ab5f16516750","Type":"ContainerDied","Data":"47936028c66dd0a1c487ff68da75da5dbeaa809bb0f97ef898eed31bf3fac13d"} Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.923413 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-r44zg" Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.955220 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:33 crc kubenswrapper[4672]: I1007 15:04:33.959375 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-r44zg"] Oct 07 15:04:35 crc kubenswrapper[4672]: I1007 15:04:35.898485 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6e03e23-3ab0-42cd-8b45-ab5f16516750" path="/var/lib/kubelet/pods/e6e03e23-3ab0-42cd-8b45-ab5f16516750/volumes" Oct 07 15:04:38 crc kubenswrapper[4672]: I1007 15:04:38.663032 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:38 crc kubenswrapper[4672]: I1007 15:04:38.663079 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:38 crc kubenswrapper[4672]: I1007 15:04:38.685285 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:38 crc kubenswrapper[4672]: I1007 15:04:38.969001 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-6zf4v" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.624305 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg"] Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.625871 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.627495 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tvntc" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.633792 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg"] Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.789829 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.790141 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9fp7\" (UniqueName: \"kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.790179 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util\") pod 
\"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.890900 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9fp7\" (UniqueName: \"kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.890963 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.891005 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.891500 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.891555 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.908919 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9fp7\" (UniqueName: \"kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7\") pod \"55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:48 crc kubenswrapper[4672]: I1007 15:04:48.943003 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:49 crc kubenswrapper[4672]: I1007 15:04:49.151429 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg"] Oct 07 15:04:50 crc kubenswrapper[4672]: I1007 15:04:50.006657 4672 generic.go:334] "Generic (PLEG): container finished" podID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerID="14fbfcc722c32efee53d93906162534bf2cc1e64776b4e7ce7bc9416a270748e" exitCode=0 Oct 07 15:04:50 crc kubenswrapper[4672]: I1007 15:04:50.006701 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" event={"ID":"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf","Type":"ContainerDied","Data":"14fbfcc722c32efee53d93906162534bf2cc1e64776b4e7ce7bc9416a270748e"} Oct 07 15:04:50 crc kubenswrapper[4672]: I1007 15:04:50.006749 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" event={"ID":"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf","Type":"ContainerStarted","Data":"89e6ebeb4333f8fc1326a2e75e003132ed609575698d4b7b349cc31bde10e375"} Oct 07 15:04:51 crc kubenswrapper[4672]: I1007 15:04:51.014583 4672 generic.go:334] "Generic (PLEG): container finished" podID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerID="2f1021beeebb2d694a64f53d3ec3d460599e2785b1f01df57a623ab6418c4c25" exitCode=0 Oct 07 15:04:51 crc kubenswrapper[4672]: I1007 15:04:51.014676 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" event={"ID":"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf","Type":"ContainerDied","Data":"2f1021beeebb2d694a64f53d3ec3d460599e2785b1f01df57a623ab6418c4c25"} Oct 07 15:04:52 crc kubenswrapper[4672]: I1007 15:04:52.025359 4672 generic.go:334] "Generic (PLEG): container finished" podID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerID="23f7df4a64f188e361bc57befaf4980ebdf26a018ecf031f419576cf2abd3159" exitCode=0 Oct 07 15:04:52 crc kubenswrapper[4672]: I1007 15:04:52.025453 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" event={"ID":"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf","Type":"ContainerDied","Data":"23f7df4a64f188e361bc57befaf4980ebdf26a018ecf031f419576cf2abd3159"} Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.249655 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.353481 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle\") pod \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.354283 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9fp7\" (UniqueName: \"kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7\") pod \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.354348 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util\") pod \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\" (UID: \"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf\") " Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.355336 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle" (OuterVolumeSpecName: "bundle") pod "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" (UID: "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.361723 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7" (OuterVolumeSpecName: "kube-api-access-j9fp7") pod "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" (UID: "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf"). InnerVolumeSpecName "kube-api-access-j9fp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.368435 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util" (OuterVolumeSpecName: "util") pod "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" (UID: "ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.456903 4672 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.456968 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9fp7\" (UniqueName: \"kubernetes.io/projected/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-kube-api-access-j9fp7\") on node \"crc\" DevicePath \"\"" Oct 07 15:04:53 crc kubenswrapper[4672]: I1007 15:04:53.456992 4672 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf-util\") on node \"crc\" DevicePath \"\"" Oct 07 15:04:54 crc kubenswrapper[4672]: I1007 15:04:54.042224 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" event={"ID":"ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf","Type":"ContainerDied","Data":"89e6ebeb4333f8fc1326a2e75e003132ed609575698d4b7b349cc31bde10e375"} Oct 07 15:04:54 crc kubenswrapper[4672]: I1007 15:04:54.042273 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89e6ebeb4333f8fc1326a2e75e003132ed609575698d4b7b349cc31bde10e375" Oct 07 15:04:54 crc kubenswrapper[4672]: I1007 15:04:54.042288 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg" Oct 07 15:04:56 crc kubenswrapper[4672]: I1007 15:04:56.650679 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:04:56 crc kubenswrapper[4672]: I1007 15:04:56.651061 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:04:56 crc kubenswrapper[4672]: I1007 15:04:56.651112 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:04:56 crc kubenswrapper[4672]: I1007 15:04:56.651820 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:04:56 crc kubenswrapper[4672]: I1007 15:04:56.651889 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56" gracePeriod=600 Oct 07 15:04:57 crc kubenswrapper[4672]: I1007 15:04:57.062855 4672 generic.go:334] "Generic (PLEG): container finished" 
podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56" exitCode=0 Oct 07 15:04:57 crc kubenswrapper[4672]: I1007 15:04:57.062894 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56"} Oct 07 15:04:57 crc kubenswrapper[4672]: I1007 15:04:57.063160 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced"} Oct 07 15:04:57 crc kubenswrapper[4672]: I1007 15:04:57.063177 4672 scope.go:117] "RemoveContainer" containerID="e7f37a84632604d116b2f44fdb47fdd21bdcd9295b5577b67f25b2f2a0425bdb" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.919312 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9"] Oct 07 15:05:00 crc kubenswrapper[4672]: E1007 15:05:00.919888 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="util" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.919906 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="util" Oct 07 15:05:00 crc kubenswrapper[4672]: E1007 15:05:00.919921 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="pull" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.919928 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="pull" Oct 07 15:05:00 crc kubenswrapper[4672]: E1007 15:05:00.919942 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="extract" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.919949 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="extract" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.920132 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf" containerName="extract" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.920858 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.923262 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-m67nj" Oct 07 15:05:00 crc kubenswrapper[4672]: I1007 15:05:00.944197 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9"] Oct 07 15:05:01 crc kubenswrapper[4672]: I1007 15:05:01.061893 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b6j9\" (UniqueName: \"kubernetes.io/projected/c2468e9c-5d8a-487f-8870-3b89f8c0e905-kube-api-access-4b6j9\") pod \"openstack-operator-controller-operator-7b5c677b9b-6dzk9\" (UID: \"c2468e9c-5d8a-487f-8870-3b89f8c0e905\") " pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:01 crc kubenswrapper[4672]: I1007 15:05:01.163564 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b6j9\" (UniqueName: \"kubernetes.io/projected/c2468e9c-5d8a-487f-8870-3b89f8c0e905-kube-api-access-4b6j9\") pod \"openstack-operator-controller-operator-7b5c677b9b-6dzk9\" (UID: \"c2468e9c-5d8a-487f-8870-3b89f8c0e905\") " pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:01 crc kubenswrapper[4672]: I1007 15:05:01.182284 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b6j9\" (UniqueName: \"kubernetes.io/projected/c2468e9c-5d8a-487f-8870-3b89f8c0e905-kube-api-access-4b6j9\") pod \"openstack-operator-controller-operator-7b5c677b9b-6dzk9\" (UID: \"c2468e9c-5d8a-487f-8870-3b89f8c0e905\") " pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:01 crc kubenswrapper[4672]: I1007 15:05:01.241636 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:01 crc kubenswrapper[4672]: I1007 15:05:01.640780 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9"] Oct 07 15:05:01 crc kubenswrapper[4672]: W1007 15:05:01.648258 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2468e9c_5d8a_487f_8870_3b89f8c0e905.slice/crio-e5d0ad3769df21c6692e5f426d40f43b869cc2306e905ed6001c9ab3c461c87b WatchSource:0}: Error finding container e5d0ad3769df21c6692e5f426d40f43b869cc2306e905ed6001c9ab3c461c87b: Status 404 returned error can't find the container with id e5d0ad3769df21c6692e5f426d40f43b869cc2306e905ed6001c9ab3c461c87b Oct 07 15:05:02 crc kubenswrapper[4672]: I1007 15:05:02.094568 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" event={"ID":"c2468e9c-5d8a-487f-8870-3b89f8c0e905","Type":"ContainerStarted","Data":"e5d0ad3769df21c6692e5f426d40f43b869cc2306e905ed6001c9ab3c461c87b"} Oct 07 15:05:06 crc kubenswrapper[4672]: I1007 15:05:06.119138 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" event={"ID":"c2468e9c-5d8a-487f-8870-3b89f8c0e905","Type":"ContainerStarted","Data":"9123254013fcf1b01bc72193151df284a0bcab8773cff44759a8e2f494c8dab9"} Oct 07 15:05:08 crc kubenswrapper[4672]: I1007 15:05:08.132748 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" event={"ID":"c2468e9c-5d8a-487f-8870-3b89f8c0e905","Type":"ContainerStarted","Data":"3556c8717938941f78c3e24a1a3cd3035f8e59d7ce6233577e233185f388c0f9"} Oct 07 15:05:08 crc kubenswrapper[4672]: I1007 15:05:08.132892 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:08 crc kubenswrapper[4672]: I1007 15:05:08.162753 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" podStartSLOduration=2.292538582 podStartE2EDuration="8.162734483s" podCreationTimestamp="2025-10-07 15:05:00 +0000 UTC" firstStartedPulling="2025-10-07 15:05:01.650129241 +0000 UTC m=+978.625307822" lastFinishedPulling="2025-10-07 15:05:07.520325142 +0000 UTC m=+984.495503723" observedRunningTime="2025-10-07 15:05:08.161822007 +0000 UTC m=+985.137000588" watchObservedRunningTime="2025-10-07 15:05:08.162734483 +0000 UTC m=+985.137913064" Oct 07 15:05:11 crc kubenswrapper[4672]: I1007 15:05:11.244827 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7b5c677b9b-6dzk9" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.185392 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.186994 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.190296 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ktg7d" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.205459 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.206682 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.210552 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-xjtk7" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.211350 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.217816 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.219630 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.221878 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-86flv" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.228662 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.229626 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.233998 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-446v4" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.242396 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.253847 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.259821 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.270880 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.271786 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.275453 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-42l2q" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.285946 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97shq\" (UniqueName: \"kubernetes.io/projected/d427841d-eba8-45b1-aa18-de4a5d1fecaa-kube-api-access-97shq\") pod \"cinder-operator-controller-manager-7d4d4f8d-jnwxm\" (UID: \"d427841d-eba8-45b1-aa18-de4a5d1fecaa\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.286065 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v47sf\" (UniqueName: \"kubernetes.io/projected/b5f8d795-a31d-4992-99fc-590848eae6fd-kube-api-access-v47sf\") pod \"barbican-operator-controller-manager-58c4cd55f4-hvp85\" (UID: \"b5f8d795-a31d-4992-99fc-590848eae6fd\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.306224 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.307529 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.309948 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.310710 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-7zgb5" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.322389 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.340086 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.341330 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-598db"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.342216 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.342675 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.345201 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vv7qs" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.345455 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-62424" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.346204 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.368083 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.377237 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-598db"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387605 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr7np\" (UniqueName: \"kubernetes.io/projected/a70458ed-18c5-49ef-8e30-83e39c3ec5e5-kube-api-access-lr7np\") pod \"glance-operator-controller-manager-5dc44df7d5-2mddh\" (UID: \"a70458ed-18c5-49ef-8e30-83e39c3ec5e5\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387647 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24cvl\" (UniqueName: \"kubernetes.io/projected/47db8f43-eb79-4338-88e1-1b464c8de306-kube-api-access-24cvl\") pod \"horizon-operator-controller-manager-76d5b87f47-m7vrt\" (UID: \"47db8f43-eb79-4338-88e1-1b464c8de306\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387688 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v47sf\" (UniqueName: \"kubernetes.io/projected/b5f8d795-a31d-4992-99fc-590848eae6fd-kube-api-access-v47sf\") pod \"barbican-operator-controller-manager-58c4cd55f4-hvp85\" (UID: \"b5f8d795-a31d-4992-99fc-590848eae6fd\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387723 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdctr\" (UniqueName: \"kubernetes.io/projected/fba2eb6a-4cb0-4fc9-9625-e7a57382e412-kube-api-access-sdctr\") pod \"designate-operator-controller-manager-75dfd9b554-l4zjl\" (UID: \"fba2eb6a-4cb0-4fc9-9625-e7a57382e412\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387801 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97shq\" (UniqueName: \"kubernetes.io/projected/d427841d-eba8-45b1-aa18-de4a5d1fecaa-kube-api-access-97shq\") pod \"cinder-operator-controller-manager-7d4d4f8d-jnwxm\" (UID: \"d427841d-eba8-45b1-aa18-de4a5d1fecaa\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.387818 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m697\" (UniqueName: \"kubernetes.io/projected/8c3d7854-7f93-46f3-aa4c-1c26dc987cbe-kube-api-access-9m697\") pod \"heat-operator-controller-manager-54b4974c45-hbh5b\" (UID: \"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.388043 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.389190 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.392155 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-qm4hr" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.395291 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.396405 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.398803 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-4cghg" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.409251 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.437168 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.484887 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.488848 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.488926 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97shq\" (UniqueName: \"kubernetes.io/projected/d427841d-eba8-45b1-aa18-de4a5d1fecaa-kube-api-access-97shq\") pod \"cinder-operator-controller-manager-7d4d4f8d-jnwxm\" (UID: \"d427841d-eba8-45b1-aa18-de4a5d1fecaa\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489674 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489820 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s8rs\" (UniqueName: \"kubernetes.io/projected/255dad32-3ed4-49eb-8e4d-6cc40d83acc7-kube-api-access-8s8rs\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-72pgl\" (UID: \"255dad32-3ed4-49eb-8e4d-6cc40d83acc7\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489880 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdctr\" (UniqueName: \"kubernetes.io/projected/fba2eb6a-4cb0-4fc9-9625-e7a57382e412-kube-api-access-sdctr\") pod \"designate-operator-controller-manager-75dfd9b554-l4zjl\" (UID: \"fba2eb6a-4cb0-4fc9-9625-e7a57382e412\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489902 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xm46\" (UniqueName: \"kubernetes.io/projected/8b98122f-1fe5-456b-9e60-e0ac676afbfc-kube-api-access-8xm46\") pod \"ironic-operator-controller-manager-649675d675-598db\" (UID: \"8b98122f-1fe5-456b-9e60-e0ac676afbfc\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489925 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4q7z\" (UniqueName: \"kubernetes.io/projected/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-kube-api-access-x4q7z\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489957 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m697\" (UniqueName: \"kubernetes.io/projected/8c3d7854-7f93-46f3-aa4c-1c26dc987cbe-kube-api-access-9m697\") pod \"heat-operator-controller-manager-54b4974c45-hbh5b\" (UID: \"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489968 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.489991 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lh79\" (UniqueName: \"kubernetes.io/projected/764c99ab-d28b-4a93-b2e6-5abdef46cde8-kube-api-access-6lh79\") pod \"manila-operator-controller-manager-65d89cfd9f-44jhh\" (UID: \"764c99ab-d28b-4a93-b2e6-5abdef46cde8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.490010 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.490062 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr7np\" (UniqueName: \"kubernetes.io/projected/a70458ed-18c5-49ef-8e30-83e39c3ec5e5-kube-api-access-lr7np\") pod \"glance-operator-controller-manager-5dc44df7d5-2mddh\" (UID: \"a70458ed-18c5-49ef-8e30-83e39c3ec5e5\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.490079 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24cvl\" (UniqueName: \"kubernetes.io/projected/47db8f43-eb79-4338-88e1-1b464c8de306-kube-api-access-24cvl\") pod \"horizon-operator-controller-manager-76d5b87f47-m7vrt\" (UID: \"47db8f43-eb79-4338-88e1-1b464c8de306\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.493252 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-xjm5j" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.493860 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v47sf\" (UniqueName: \"kubernetes.io/projected/b5f8d795-a31d-4992-99fc-590848eae6fd-kube-api-access-v47sf\") pod \"barbican-operator-controller-manager-58c4cd55f4-hvp85\" (UID: \"b5f8d795-a31d-4992-99fc-590848eae6fd\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.494782 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-hc9sv" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.509835 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.512577 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.529153 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.533198 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr7np\" (UniqueName: \"kubernetes.io/projected/a70458ed-18c5-49ef-8e30-83e39c3ec5e5-kube-api-access-lr7np\") pod \"glance-operator-controller-manager-5dc44df7d5-2mddh\" (UID: \"a70458ed-18c5-49ef-8e30-83e39c3ec5e5\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.542727 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24cvl\" (UniqueName: \"kubernetes.io/projected/47db8f43-eb79-4338-88e1-1b464c8de306-kube-api-access-24cvl\") pod \"horizon-operator-controller-manager-76d5b87f47-m7vrt\" (UID: \"47db8f43-eb79-4338-88e1-1b464c8de306\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.543010 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdctr\" (UniqueName: \"kubernetes.io/projected/fba2eb6a-4cb0-4fc9-9625-e7a57382e412-kube-api-access-sdctr\") pod \"designate-operator-controller-manager-75dfd9b554-l4zjl\" (UID: \"fba2eb6a-4cb0-4fc9-9625-e7a57382e412\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.547084 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.547604 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m697\" (UniqueName: \"kubernetes.io/projected/8c3d7854-7f93-46f3-aa4c-1c26dc987cbe-kube-api-access-9m697\") pod \"heat-operator-controller-manager-54b4974c45-hbh5b\" (UID: \"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.559761 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.568182 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.585929 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.586999 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591195 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s8rs\" (UniqueName: \"kubernetes.io/projected/255dad32-3ed4-49eb-8e4d-6cc40d83acc7-kube-api-access-8s8rs\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-72pgl\" (UID: \"255dad32-3ed4-49eb-8e4d-6cc40d83acc7\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591240 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fpgg\" (UniqueName: \"kubernetes.io/projected/00724d85-8a20-4114-9c19-10171b42d9d1-kube-api-access-9fpgg\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq\" (UID: \"00724d85-8a20-4114-9c19-10171b42d9d1\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591272 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xm46\" (UniqueName: \"kubernetes.io/projected/8b98122f-1fe5-456b-9e60-e0ac676afbfc-kube-api-access-8xm46\") pod \"ironic-operator-controller-manager-649675d675-598db\" (UID: \"8b98122f-1fe5-456b-9e60-e0ac676afbfc\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591294 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltnd4\" (UniqueName: \"kubernetes.io/projected/24fa96bf-c94d-4e2c-974a-d00f03de100d-kube-api-access-ltnd4\") pod \"neutron-operator-controller-manager-8d984cc4d-l2zj6\" (UID: \"24fa96bf-c94d-4e2c-974a-d00f03de100d\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591315 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4q7z\" (UniqueName: \"kubernetes.io/projected/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-kube-api-access-x4q7z\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591363 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lh79\" (UniqueName: \"kubernetes.io/projected/764c99ab-d28b-4a93-b2e6-5abdef46cde8-kube-api-access-6lh79\") pod \"manila-operator-controller-manager-65d89cfd9f-44jhh\" (UID: \"764c99ab-d28b-4a93-b2e6-5abdef46cde8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.591383 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: E1007 15:05:33.591515 4672 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 07 15:05:33 crc 
kubenswrapper[4672]: E1007 15:05:33.591556 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert podName:2186b39e-fac1-49ed-a0d3-d925a4a7c2e6 nodeName:}" failed. No retries permitted until 2025-10-07 15:05:34.091541938 +0000 UTC m=+1011.066720519 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert") pod "infra-operator-controller-manager-658588b8c9-8f4wl" (UID: "2186b39e-fac1-49ed-a0d3-d925a4a7c2e6") : secret "infra-operator-webhook-server-cert" not found Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.592381 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-zvwwx" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.597514 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.598564 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.600281 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.602874 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-h9hp9" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.611941 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.619131 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lh79\" (UniqueName: \"kubernetes.io/projected/764c99ab-d28b-4a93-b2e6-5abdef46cde8-kube-api-access-6lh79\") pod \"manila-operator-controller-manager-65d89cfd9f-44jhh\" (UID: \"764c99ab-d28b-4a93-b2e6-5abdef46cde8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.627193 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xm46\" (UniqueName: \"kubernetes.io/projected/8b98122f-1fe5-456b-9e60-e0ac676afbfc-kube-api-access-8xm46\") pod \"ironic-operator-controller-manager-649675d675-598db\" (UID: \"8b98122f-1fe5-456b-9e60-e0ac676afbfc\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.627497 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4q7z\" (UniqueName: \"kubernetes.io/projected/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-kube-api-access-x4q7z\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.628771 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s8rs\" (UniqueName: \"kubernetes.io/projected/255dad32-3ed4-49eb-8e4d-6cc40d83acc7-kube-api-access-8s8rs\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-72pgl\" (UID: 
\"255dad32-3ed4-49eb-8e4d-6cc40d83acc7\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.634444 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.637170 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.646801 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.648351 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.650475 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.654103 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.656148 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xz7j5" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.659743 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.668355 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.668846 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.669540 4672 util.go:30] "No sandbox for pod can be found. 
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.677724 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-x9r4p"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.677769 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-n5lfr"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.677983 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"]
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.692781 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fpgg\" (UniqueName: \"kubernetes.io/projected/00724d85-8a20-4114-9c19-10171b42d9d1-kube-api-access-9fpgg\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq\" (UID: \"00724d85-8a20-4114-9c19-10171b42d9d1\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.692830 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltnd4\" (UniqueName: \"kubernetes.io/projected/24fa96bf-c94d-4e2c-974a-d00f03de100d-kube-api-access-ltnd4\") pod \"neutron-operator-controller-manager-8d984cc4d-l2zj6\" (UID: \"24fa96bf-c94d-4e2c-974a-d00f03de100d\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.692879 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqq8v\" (UniqueName: \"kubernetes.io/projected/8366fa08-0b1b-49f3-8ac1-7df869356e24-kube-api-access-kqq8v\") pod \"octavia-operator-controller-manager-7468f855d8-w89qd\" (UID: \"8366fa08-0b1b-49f3-8ac1-7df869356e24\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.692909 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2njll\" (UniqueName: \"kubernetes.io/projected/58058dbe-db1a-41b4-8643-21f790efaac3-kube-api-access-2njll\") pod \"nova-operator-controller-manager-7c7fc454ff-9rcgh\" (UID: \"58058dbe-db1a-41b4-8643-21f790efaac3\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.694492 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"]
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.715376 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"]
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.716927 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fpgg\" (UniqueName: \"kubernetes.io/projected/00724d85-8a20-4114-9c19-10171b42d9d1-kube-api-access-9fpgg\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq\" (UID: \"00724d85-8a20-4114-9c19-10171b42d9d1\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.721474 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"]
source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.722044 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltnd4\" (UniqueName: \"kubernetes.io/projected/24fa96bf-c94d-4e2c-974a-d00f03de100d-kube-api-access-ltnd4\") pod \"neutron-operator-controller-manager-8d984cc4d-l2zj6\" (UID: \"24fa96bf-c94d-4e2c-974a-d00f03de100d\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.723845 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.729670 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2l6f4" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.732427 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.759390 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.760670 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.760753 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.766151 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-grjvx" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.775885 4672 util.go:30] "No sandbox for pod can be found. 
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.793864 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb65p\" (UniqueName: \"kubernetes.io/projected/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-kube-api-access-mb65p\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.793895 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.793925 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5brvt\" (UniqueName: \"kubernetes.io/projected/eae78c95-e2b0-4fdb-8b01-bc446045704f-kube-api-access-5brvt\") pod \"swift-operator-controller-manager-6859f9b676-9zgt5\" (UID: \"eae78c95-e2b0-4fdb-8b01-bc446045704f\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.793970 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6vpp\" (UniqueName: \"kubernetes.io/projected/c5d2b5d0-7471-41e8-a5f9-7930a07fb483-kube-api-access-g6vpp\") pod \"ovn-operator-controller-manager-6d8b6f9b9-g8n44\" (UID: \"c5d2b5d0-7471-41e8-a5f9-7930a07fb483\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.793993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqq8v\" (UniqueName: \"kubernetes.io/projected/8366fa08-0b1b-49f3-8ac1-7df869356e24-kube-api-access-kqq8v\") pod \"octavia-operator-controller-manager-7468f855d8-w89qd\" (UID: \"8366fa08-0b1b-49f3-8ac1-7df869356e24\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.794011 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t96b\" (UniqueName: \"kubernetes.io/projected/5422dad7-a0a4-4116-bee5-8e5580d50530-kube-api-access-8t96b\") pod \"placement-operator-controller-manager-54689d9f88-xzhc4\" (UID: \"5422dad7-a0a4-4116-bee5-8e5580d50530\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.794062 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2njll\" (UniqueName: \"kubernetes.io/projected/58058dbe-db1a-41b4-8643-21f790efaac3-kube-api-access-2njll\") pod \"nova-operator-controller-manager-7c7fc454ff-9rcgh\" (UID: \"58058dbe-db1a-41b4-8643-21f790efaac3\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.794095 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xknpv\" (UniqueName: \"kubernetes.io/projected/9c7d5ea5-33a0-4006-b116-8cba83443c79-kube-api-access-xknpv\") pod \"telemetry-operator-controller-manager-5d4d74dd89-sfdxq\" (UID: \"9c7d5ea5-33a0-4006-b116-8cba83443c79\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xknpv\" (UniqueName: \"kubernetes.io/projected/9c7d5ea5-33a0-4006-b116-8cba83443c79-kube-api-access-xknpv\") pod \"telemetry-operator-controller-manager-5d4d74dd89-sfdxq\" (UID: \"9c7d5ea5-33a0-4006-b116-8cba83443c79\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.823618 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2njll\" (UniqueName: \"kubernetes.io/projected/58058dbe-db1a-41b4-8643-21f790efaac3-kube-api-access-2njll\") pod \"nova-operator-controller-manager-7c7fc454ff-9rcgh\" (UID: \"58058dbe-db1a-41b4-8643-21f790efaac3\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.823665 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqq8v\" (UniqueName: \"kubernetes.io/projected/8366fa08-0b1b-49f3-8ac1-7df869356e24-kube-api-access-kqq8v\") pod \"octavia-operator-controller-manager-7468f855d8-w89qd\" (UID: \"8366fa08-0b1b-49f3-8ac1-7df869356e24\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.858535 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.860247 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.862580 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-f5lf7" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.874703 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"] Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899157 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6vpp\" (UniqueName: \"kubernetes.io/projected/c5d2b5d0-7471-41e8-a5f9-7930a07fb483-kube-api-access-g6vpp\") pod \"ovn-operator-controller-manager-6d8b6f9b9-g8n44\" (UID: \"c5d2b5d0-7471-41e8-a5f9-7930a07fb483\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899210 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t96b\" (UniqueName: \"kubernetes.io/projected/5422dad7-a0a4-4116-bee5-8e5580d50530-kube-api-access-8t96b\") pod \"placement-operator-controller-manager-54689d9f88-xzhc4\" (UID: \"5422dad7-a0a4-4116-bee5-8e5580d50530\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899271 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xknpv\" (UniqueName: \"kubernetes.io/projected/9c7d5ea5-33a0-4006-b116-8cba83443c79-kube-api-access-xknpv\") pod \"telemetry-operator-controller-manager-5d4d74dd89-sfdxq\" (UID: \"9c7d5ea5-33a0-4006-b116-8cba83443c79\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899344 4672 
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899369 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899414 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5brvt\" (UniqueName: \"kubernetes.io/projected/eae78c95-e2b0-4fdb-8b01-bc446045704f-kube-api-access-5brvt\") pod \"swift-operator-controller-manager-6859f9b676-9zgt5\" (UID: \"eae78c95-e2b0-4fdb-8b01-bc446045704f\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.899451 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9tk4\" (UniqueName: \"kubernetes.io/projected/a0710ae8-d5bc-4f95-a4ec-76128a3916bb-kube-api-access-z9tk4\") pod \"test-operator-controller-manager-5cd5cb47d7-2z4fk\" (UID: \"a0710ae8-d5bc-4f95-a4ec-76128a3916bb\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"
Oct 07 15:05:33 crc kubenswrapper[4672]: E1007 15:05:33.899676 4672 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Oct 07 15:05:33 crc kubenswrapper[4672]: E1007 15:05:33.899708 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert podName:51779ddc-1795-45cb-8ba8-8ac78b2c43c8 nodeName:}" failed. No retries permitted until 2025-10-07 15:05:34.399698289 +0000 UTC m=+1011.374876870 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" (UID: "51779ddc-1795-45cb-8ba8-8ac78b2c43c8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.908179 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.921550 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.924000 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5brvt\" (UniqueName: \"kubernetes.io/projected/eae78c95-e2b0-4fdb-8b01-bc446045704f-kube-api-access-5brvt\") pod \"swift-operator-controller-manager-6859f9b676-9zgt5\" (UID: \"eae78c95-e2b0-4fdb-8b01-bc446045704f\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.943705 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6vpp\" (UniqueName: \"kubernetes.io/projected/c5d2b5d0-7471-41e8-a5f9-7930a07fb483-kube-api-access-g6vpp\") pod \"ovn-operator-controller-manager-6d8b6f9b9-g8n44\" (UID: \"c5d2b5d0-7471-41e8-a5f9-7930a07fb483\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.944518 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.971484 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t96b\" (UniqueName: \"kubernetes.io/projected/5422dad7-a0a4-4116-bee5-8e5580d50530-kube-api-access-8t96b\") pod \"placement-operator-controller-manager-54689d9f88-xzhc4\" (UID: \"5422dad7-a0a4-4116-bee5-8e5580d50530\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.977552 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xknpv\" (UniqueName: \"kubernetes.io/projected/9c7d5ea5-33a0-4006-b116-8cba83443c79-kube-api-access-xknpv\") pod \"telemetry-operator-controller-manager-5d4d74dd89-sfdxq\" (UID: \"9c7d5ea5-33a0-4006-b116-8cba83443c79\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.979008 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb65p\" (UniqueName: \"kubernetes.io/projected/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-kube-api-access-mb65p\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:33 crc kubenswrapper[4672]: I1007 15:05:33.984377 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.000823 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9tk4\" (UniqueName: \"kubernetes.io/projected/a0710ae8-d5bc-4f95-a4ec-76128a3916bb-kube-api-access-z9tk4\") pod \"test-operator-controller-manager-5cd5cb47d7-2z4fk\" (UID: \"a0710ae8-d5bc-4f95-a4ec-76128a3916bb\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.014590 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.047990 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9tk4\" (UniqueName: \"kubernetes.io/projected/a0710ae8-d5bc-4f95-a4ec-76128a3916bb-kube-api-access-z9tk4\") pod \"test-operator-controller-manager-5cd5cb47d7-2z4fk\" (UID: \"a0710ae8-d5bc-4f95-a4ec-76128a3916bb\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.077771 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.078875 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.082889 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.082947 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-642j6"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.092357 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.105344 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqkwx\" (UniqueName: \"kubernetes.io/projected/58c50b3b-c174-42d4-bcc1-d76b0a93cd58-kube-api-access-vqkwx\") pod \"watcher-operator-controller-manager-6cbc6dd547-27xdb\" (UID: \"58c50b3b-c174-42d4-bcc1-d76b0a93cd58\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.105486 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.110679 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.116775 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2186b39e-fac1-49ed-a0d3-d925a4a7c2e6-cert\") pod \"infra-operator-controller-manager-658588b8c9-8f4wl\" (UID: \"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.129181 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.143133 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.148137 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.150767 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.154878 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.155106 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fptrg"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.175431 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.204072 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.205046 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.205134 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.205930 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.206555 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l6fc\" (UniqueName: \"kubernetes.io/projected/20bd297b-c47a-4b56-9581-4b4699b7d1d4-kube-api-access-8l6fc\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.206584 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.206608 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d559l\" (UniqueName: \"kubernetes.io/projected/2815da8e-e8ee-45b7-a971-b36721ba4322-kube-api-access-d559l\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-j8bct\" (UID: \"2815da8e-e8ee-45b7-a971-b36721ba4322\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.206642 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqkwx\" (UniqueName: \"kubernetes.io/projected/58c50b3b-c174-42d4-bcc1-d76b0a93cd58-kube-api-access-vqkwx\") pod \"watcher-operator-controller-manager-6cbc6dd547-27xdb\" (UID: \"58c50b3b-c174-42d4-bcc1-d76b0a93cd58\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.220843 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-fz4j5"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.250765 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqkwx\" (UniqueName: \"kubernetes.io/projected/58c50b3b-c174-42d4-bcc1-d76b0a93cd58-kube-api-access-vqkwx\") pod \"watcher-operator-controller-manager-6cbc6dd547-27xdb\" (UID: \"58c50b3b-c174-42d4-bcc1-d76b0a93cd58\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.255747 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.294253 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.308785 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l6fc\" (UniqueName: \"kubernetes.io/projected/20bd297b-c47a-4b56-9581-4b4699b7d1d4-kube-api-access-8l6fc\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.308850 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.308888 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d559l\" (UniqueName: \"kubernetes.io/projected/2815da8e-e8ee-45b7-a971-b36721ba4322-kube-api-access-d559l\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-j8bct\" (UID: \"2815da8e-e8ee-45b7-a971-b36721ba4322\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.309659 4672 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.309718 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert podName:20bd297b-c47a-4b56-9581-4b4699b7d1d4 nodeName:}" failed. No retries permitted until 2025-10-07 15:05:34.809701365 +0000 UTC m=+1011.784879946 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert") pod "openstack-operator-controller-manager-86dbb8967f-5bbbt" (UID: "20bd297b-c47a-4b56-9581-4b4699b7d1d4") : secret "webhook-server-cert" not found
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.338603 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l6fc\" (UniqueName: \"kubernetes.io/projected/20bd297b-c47a-4b56-9581-4b4699b7d1d4-kube-api-access-8l6fc\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.340086 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.340711 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d559l\" (UniqueName: \"kubernetes.io/projected/2815da8e-e8ee-45b7-a971-b36721ba4322-kube-api-access-d559l\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-j8bct\" (UID: \"2815da8e-e8ee-45b7-a971-b36721ba4322\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.411966 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.412750 4672 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.412824 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert podName:51779ddc-1795-45cb-8ba8-8ac78b2c43c8 nodeName:}" failed. No retries permitted until 2025-10-07 15:05:35.412806006 +0000 UTC m=+1012.387984587 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" (UID: "51779ddc-1795-45cb-8ba8-8ac78b2c43c8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.422676 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.457095 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.539701 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.587359 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.606088 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.762211 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-598db"]
Oct 07 15:05:34 crc kubenswrapper[4672]: I1007 15:05:34.827330 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.827510 4672 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Oct 07 15:05:34 crc kubenswrapper[4672]: E1007 15:05:34.827964 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert podName:20bd297b-c47a-4b56-9581-4b4699b7d1d4 nodeName:}" failed. No retries permitted until 2025-10-07 15:05:35.827914869 +0000 UTC m=+1012.803093450 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert") pod "openstack-operator-controller-manager-86dbb8967f-5bbbt" (UID: "20bd297b-c47a-4b56-9581-4b4699b7d1d4") : secret "webhook-server-cert" not found
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.184150 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.195751 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.209190 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.217855 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.225067 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.233927 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6"]
Oct 07 15:05:35 crc kubenswrapper[4672]: W1007 15:05:35.247364 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8366fa08_0b1b_49f3_8ac1_7df869356e24.slice/crio-a52bd143e863490d0876ee77edbf4739b4a17bdf289183e9f0f27d7029926b57 WatchSource:0}: Error finding container a52bd143e863490d0876ee77edbf4739b4a17bdf289183e9f0f27d7029926b57: Status 404 returned error can't find the container with id a52bd143e863490d0876ee77edbf4739b4a17bdf289183e9f0f27d7029926b57
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.308346 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" event={"ID":"00724d85-8a20-4114-9c19-10171b42d9d1","Type":"ContainerStarted","Data":"8be97a995e7bc020a6c725309d7d446266e9ecfb2d7653c97b13c284ab48eb98"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.314513 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" event={"ID":"24fa96bf-c94d-4e2c-974a-d00f03de100d","Type":"ContainerStarted","Data":"84090ca53b6502fe4238c290c7290888c6b1cbaba860bed4f582a534423c586a"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.321822 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" event={"ID":"b5f8d795-a31d-4992-99fc-590848eae6fd","Type":"ContainerStarted","Data":"9857592bc25ebfc679c2d4ddccce50bfc359a7172660646e3d5df81959f7d172"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.322926 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" event={"ID":"8366fa08-0b1b-49f3-8ac1-7df869356e24","Type":"ContainerStarted","Data":"a52bd143e863490d0876ee77edbf4739b4a17bdf289183e9f0f27d7029926b57"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.324494 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" event={"ID":"fba2eb6a-4cb0-4fc9-9625-e7a57382e412","Type":"ContainerStarted","Data":"d642c5a4f06b126fc519f4c979c754d58f38542d157b61264e5f2b6ebd786ba9"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.325566 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" event={"ID":"764c99ab-d28b-4a93-b2e6-5abdef46cde8","Type":"ContainerStarted","Data":"9b6a85703cc9d40fdd2849d1d2e364c4773f0d8bbcfa1e732014df8445f49b75"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.326467 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" event={"ID":"a70458ed-18c5-49ef-8e30-83e39c3ec5e5","Type":"ContainerStarted","Data":"903399e23c4ccb78f976055562f0c056428e3ad3713824882f8c1692fbd1dfa5"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.327236 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" event={"ID":"8b98122f-1fe5-456b-9e60-e0ac676afbfc","Type":"ContainerStarted","Data":"f7bfd5c9eb5f1cf4bc6ea7884603e8c165b70d4fcd61c5d9170280ba2acf7a73"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.327890 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" event={"ID":"255dad32-3ed4-49eb-8e4d-6cc40d83acc7","Type":"ContainerStarted","Data":"5ba903e3a572bef41d6d380b69cbc54c8e1c36a5f758384f4406b5121d2b0c7a"}
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.328689 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" event={"ID":"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe","Type":"ContainerStarted","Data":"d988ce545e14e6e2c0a695e901c6be359818076126cd1606170ca31c2def7473"}
event={"ID":"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe","Type":"ContainerStarted","Data":"d988ce545e14e6e2c0a695e901c6be359818076126cd1606170ca31c2def7473"} Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.334363 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" event={"ID":"47db8f43-eb79-4338-88e1-1b464c8de306","Type":"ContainerStarted","Data":"8ea23dfd699857e14f2f612c657f7085e3fc7e20156a5f919c090289de86bbab"} Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.336553 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" event={"ID":"d427841d-eba8-45b1-aa18-de4a5d1fecaa","Type":"ContainerStarted","Data":"db397c704c0d4e42038ff8d162afcc9cf47ad61fd7e4e51f631d6c2dd8da1cb6"} Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.387620 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44"] Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.396295 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk"] Oct 07 15:05:35 crc kubenswrapper[4672]: W1007 15:05:35.399137 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5422dad7_a0a4_4116_bee5_8e5580d50530.slice/crio-0c822ccfd8f898e47657c9a9dbc528df48dbc0ea1df18d201958c1745b7ac5ce WatchSource:0}: Error finding container 0c822ccfd8f898e47657c9a9dbc528df48dbc0ea1df18d201958c1745b7ac5ce: Status 404 returned error can't find the container with id 0c822ccfd8f898e47657c9a9dbc528df48dbc0ea1df18d201958c1745b7ac5ce Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.402605 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4"] Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.406518 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh"] Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.410371 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl"] Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.416144 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.416196 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x4q7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-658588b8c9-8f4wl_openstack-operators(2186b39e-fac1-49ed-a0d3-d925a4a7c2e6): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.436326 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.442276 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51779ddc-1795-45cb-8ba8-8ac78b2c43c8-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46\" (UID: \"51779ddc-1795-45cb-8ba8-8ac78b2c43c8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.527040 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct"]
Oct 07 15:05:35 crc kubenswrapper[4672]: W1007 15:05:35.536884 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2815da8e_e8ee_45b7_a971_b36721ba4322.slice/crio-aa8b2ebb6514a7ed466145f6702b3069ab14df956d86a61ec7e924690a313351 WatchSource:0}: Error finding container aa8b2ebb6514a7ed466145f6702b3069ab14df956d86a61ec7e924690a313351: Status 404 returned error can't find the container with id aa8b2ebb6514a7ed466145f6702b3069ab14df956d86a61ec7e924690a313351
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.537663 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.538314 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"
Oct 07 15:05:35 crc kubenswrapper[4672]: W1007 15:05:35.540244 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58c50b3b_c174_42d4_bcc1_d76b0a93cd58.slice/crio-cbc04f41008a02fef58f5b0546abfa8306f666fce7520510c3d5bc52f7997252 WatchSource:0}: Error finding container cbc04f41008a02fef58f5b0546abfa8306f666fce7520510c3d5bc52f7997252: Status 404 returned error can't find the container with id cbc04f41008a02fef58f5b0546abfa8306f666fce7520510c3d5bc52f7997252
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.543849 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5"]
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.548988 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq"]
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.564227 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vqkwx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6cbc6dd547-27xdb_openstack-operators(58c50b3b-c174-42d4-bcc1-d76b0a93cd58): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.564474 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d559l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-j8bct_openstack-operators(2815da8e-e8ee-45b7-a971-b36721ba4322): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.565812 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" podUID="2815da8e-e8ee-45b7-a971-b36721ba4322"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.574958 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xknpv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5d4d74dd89-sfdxq_openstack-operators(9c7d5ea5-33a0-4006-b116-8cba83443c79): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.767898 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" podUID="c5d2b5d0-7471-41e8-a5f9-7930a07fb483"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.781719 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" podUID="9c7d5ea5-33a0-4006-b116-8cba83443c79"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.789269 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" podUID="58c50b3b-c174-42d4-bcc1-d76b0a93cd58"
Oct 07 15:05:35 crc kubenswrapper[4672]: E1007 15:05:35.802981 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" podUID="2186b39e-fac1-49ed-a0d3-d925a4a7c2e6"
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.843569 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
Oct 07 15:05:35 crc kubenswrapper[4672]: I1007 15:05:35.849660 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"
(UniqueName: \"kubernetes.io/secret/20bd297b-c47a-4b56-9581-4b4699b7d1d4-cert\") pod \"openstack-operator-controller-manager-86dbb8967f-5bbbt\" (UID: \"20bd297b-c47a-4b56-9581-4b4699b7d1d4\") " pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.011449 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46"] Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.013580 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" Oct 07 15:05:36 crc kubenswrapper[4672]: W1007 15:05:36.044458 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51779ddc_1795_45cb_8ba8_8ac78b2c43c8.slice/crio-6f5929ba1dcadf361a193ed116e9e2c1578c4f3f4d92dbe60b56c76d3cf650e8 WatchSource:0}: Error finding container 6f5929ba1dcadf361a193ed116e9e2c1578c4f3f4d92dbe60b56c76d3cf650e8: Status 404 returned error can't find the container with id 6f5929ba1dcadf361a193ed116e9e2c1578c4f3f4d92dbe60b56c76d3cf650e8 Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.375920 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" event={"ID":"58c50b3b-c174-42d4-bcc1-d76b0a93cd58","Type":"ContainerStarted","Data":"2ff864952735c7345ba2882eb8d22933f899dad360b0f5f4e7c55e21667a2ff0"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.376309 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" event={"ID":"58c50b3b-c174-42d4-bcc1-d76b0a93cd58","Type":"ContainerStarted","Data":"cbc04f41008a02fef58f5b0546abfa8306f666fce7520510c3d5bc52f7997252"} Oct 07 15:05:36 crc kubenswrapper[4672]: E1007 15:05:36.378412 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" podUID="58c50b3b-c174-42d4-bcc1-d76b0a93cd58" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.379281 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" event={"ID":"2815da8e-e8ee-45b7-a971-b36721ba4322","Type":"ContainerStarted","Data":"aa8b2ebb6514a7ed466145f6702b3069ab14df956d86a61ec7e924690a313351"} Oct 07 15:05:36 crc kubenswrapper[4672]: E1007 15:05:36.381792 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" podUID="2815da8e-e8ee-45b7-a971-b36721ba4322" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.384225 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" 
event={"ID":"a0710ae8-d5bc-4f95-a4ec-76128a3916bb","Type":"ContainerStarted","Data":"af24b4f145534ee06680c8276572ee73e4d2c87bc8e752f212822428e2140721"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.388589 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" event={"ID":"51779ddc-1795-45cb-8ba8-8ac78b2c43c8","Type":"ContainerStarted","Data":"6f5929ba1dcadf361a193ed116e9e2c1578c4f3f4d92dbe60b56c76d3cf650e8"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.392233 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" event={"ID":"58058dbe-db1a-41b4-8643-21f790efaac3","Type":"ContainerStarted","Data":"6cfb1e5b82d41e4448038077cd2f70d07b2b4abac4a637f70a6ed3d0bbc5cc9f"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.403846 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" event={"ID":"5422dad7-a0a4-4116-bee5-8e5580d50530","Type":"ContainerStarted","Data":"0c822ccfd8f898e47657c9a9dbc528df48dbc0ea1df18d201958c1745b7ac5ce"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.410769 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" event={"ID":"c5d2b5d0-7471-41e8-a5f9-7930a07fb483","Type":"ContainerStarted","Data":"cd25174f31876be9762d035712d266bdc03cddd346887174c2db83fabd996d41"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.410821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" event={"ID":"c5d2b5d0-7471-41e8-a5f9-7930a07fb483","Type":"ContainerStarted","Data":"e865633f5d09456bc7b4f584c2bd616f58c29fb9b86baaca7458a67caa665add"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.416378 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" event={"ID":"eae78c95-e2b0-4fdb-8b01-bc446045704f","Type":"ContainerStarted","Data":"8650fb8947f6c2e883df0531ab35d0340bcb242e0d7499b312e216f57791fb7a"} Oct 07 15:05:36 crc kubenswrapper[4672]: E1007 15:05:36.416381 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" podUID="c5d2b5d0-7471-41e8-a5f9-7930a07fb483" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.420244 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" event={"ID":"9c7d5ea5-33a0-4006-b116-8cba83443c79","Type":"ContainerStarted","Data":"32ca6109e6f543bb85b65ebe3f27518b57355592f08f7a263fcb95be3c1fbf7e"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.420498 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" event={"ID":"9c7d5ea5-33a0-4006-b116-8cba83443c79","Type":"ContainerStarted","Data":"37064592abcec2fc235f6fb416167a87493615268cff5a50f63d9334e20f2744"} Oct 07 15:05:36 crc kubenswrapper[4672]: E1007 15:05:36.422548 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" podUID="9c7d5ea5-33a0-4006-b116-8cba83443c79" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.423265 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" event={"ID":"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6","Type":"ContainerStarted","Data":"70087964775e79a452a94631861fab3b13a4c6aa5248437498477d25f7994be3"} Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.423296 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" event={"ID":"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6","Type":"ContainerStarted","Data":"7cb237990a0e9842b13fba432039686fb32ebbcaf7520cc5a3ea38a5d1b2847c"} Oct 07 15:05:36 crc kubenswrapper[4672]: E1007 15:05:36.425210 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" podUID="2186b39e-fac1-49ed-a0d3-d925a4a7c2e6" Oct 07 15:05:36 crc kubenswrapper[4672]: I1007 15:05:36.521733 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt"] Oct 07 15:05:37 crc kubenswrapper[4672]: I1007 15:05:37.430802 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" event={"ID":"20bd297b-c47a-4b56-9581-4b4699b7d1d4","Type":"ContainerStarted","Data":"e6ac33d933570db09fc6acae842032b66cbf258bcc02ae1f103c56e3a036f4bb"} Oct 07 15:05:37 crc kubenswrapper[4672]: E1007 15:05:37.432216 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" podUID="2186b39e-fac1-49ed-a0d3-d925a4a7c2e6" Oct 07 15:05:37 crc kubenswrapper[4672]: E1007 15:05:37.432252 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" podUID="9c7d5ea5-33a0-4006-b116-8cba83443c79" Oct 07 15:05:37 crc kubenswrapper[4672]: E1007 15:05:37.432388 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" podUID="c5d2b5d0-7471-41e8-a5f9-7930a07fb483" Oct 07 15:05:37 crc kubenswrapper[4672]: E1007 15:05:37.432595 4672 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" podUID="2815da8e-e8ee-45b7-a971-b36721ba4322" Oct 07 15:05:37 crc kubenswrapper[4672]: E1007 15:05:37.433136 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" podUID="58c50b3b-c174-42d4-bcc1-d76b0a93cd58" Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.485080 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" event={"ID":"764c99ab-d28b-4a93-b2e6-5abdef46cde8","Type":"ContainerStarted","Data":"ba4a24b0c4025da5452d1fd3ceea8a7d2b90048335cddfe908f93520fc6934e6"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.492142 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" event={"ID":"a70458ed-18c5-49ef-8e30-83e39c3ec5e5","Type":"ContainerStarted","Data":"aed8b3e6cd279422fe8b7973d430020b8c62f3cc9691f99a18d5430d28099452"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.502594 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" event={"ID":"255dad32-3ed4-49eb-8e4d-6cc40d83acc7","Type":"ContainerStarted","Data":"c2af4d4231800063647372d036ec5fc2614d43622fc2325c1dce686e3b550ddc"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.509072 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" event={"ID":"d427841d-eba8-45b1-aa18-de4a5d1fecaa","Type":"ContainerStarted","Data":"c82b46786d9e03ad30d87c4f63bc8d1e7f6d485ab0d97055831e593cb99227a2"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.529666 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" event={"ID":"00724d85-8a20-4114-9c19-10171b42d9d1","Type":"ContainerStarted","Data":"cf98aadafe4dbc04306d464a19445eac7e6b37bb6d7a70822a54574764c5feed"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.549727 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" event={"ID":"a0710ae8-d5bc-4f95-a4ec-76128a3916bb","Type":"ContainerStarted","Data":"077b3ab9e5dfb27f141f72770fdedbd33984bb409a2bc9ba500f029a7fc42d43"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.566596 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" event={"ID":"24fa96bf-c94d-4e2c-974a-d00f03de100d","Type":"ContainerStarted","Data":"8a05d46549153d37c065a7d0706feb6d258da7c3465be93150897013e8bc2b0a"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.579885 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" 
event={"ID":"b5f8d795-a31d-4992-99fc-590848eae6fd","Type":"ContainerStarted","Data":"119de93b10c2264cfc2739bd9997288221d891585baf303d9ca146ca8220ad5b"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.588570 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" event={"ID":"20bd297b-c47a-4b56-9581-4b4699b7d1d4","Type":"ContainerStarted","Data":"3d8b859119a37c94cabb5aefe00f61f4f10745f4b8690cb441d9013b9b2cf80c"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.602168 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" event={"ID":"eae78c95-e2b0-4fdb-8b01-bc446045704f","Type":"ContainerStarted","Data":"965078e49bb1dadca6455c85fb3d0aa04e721a84480f145b6913ca15ff090dbf"} Oct 07 15:05:44 crc kubenswrapper[4672]: I1007 15:05:44.618825 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" event={"ID":"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe","Type":"ContainerStarted","Data":"4fdc21fb0c6dad0c0f54c8264d3b29ec5c372d889308d7e601ba9ea652c203ce"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.627988 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" event={"ID":"5422dad7-a0a4-4116-bee5-8e5580d50530","Type":"ContainerStarted","Data":"54b14081636632197f4c151a94b287697cc77483d142c70fcc11e361aa249522"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.628423 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" event={"ID":"5422dad7-a0a4-4116-bee5-8e5580d50530","Type":"ContainerStarted","Data":"51f82ccbb9455a46b14efced548114bc206f950fba74b474e1e703d18cc43844"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.628697 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.639755 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" event={"ID":"8b98122f-1fe5-456b-9e60-e0ac676afbfc","Type":"ContainerStarted","Data":"ba2400a86001d5514114bde47ec928db9f204549365e3b84381a4c361f735634"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.639798 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" event={"ID":"8b98122f-1fe5-456b-9e60-e0ac676afbfc","Type":"ContainerStarted","Data":"eb6d9bd242d4fc40278fdea6454302ea992de37f7602bc39717f8d9b495df294"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.639816 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.656227 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" event={"ID":"eae78c95-e2b0-4fdb-8b01-bc446045704f","Type":"ContainerStarted","Data":"74f8402616c1f2c61694643e814fbd0c443b5abf54add4bdea5823ef36d35278"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.656848 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.664659 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" event={"ID":"47db8f43-eb79-4338-88e1-1b464c8de306","Type":"ContainerStarted","Data":"8d1479eb5ab9f00994b09dabc5889fa436bb7dcbf371c55c7ac85312ed7587bd"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.664702 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" event={"ID":"47db8f43-eb79-4338-88e1-1b464c8de306","Type":"ContainerStarted","Data":"eaaee7ba6baff81cc612160858b32f2054861e0728244ad3a223c7429f20e6a7"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.665299 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.688032 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" podStartSLOduration=3.8157608229999997 podStartE2EDuration="12.688001287s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.830549304 +0000 UTC m=+1011.805727885" lastFinishedPulling="2025-10-07 15:05:43.702789768 +0000 UTC m=+1020.677968349" observedRunningTime="2025-10-07 15:05:45.686681809 +0000 UTC m=+1022.661860380" watchObservedRunningTime="2025-10-07 15:05:45.688001287 +0000 UTC m=+1022.663179878" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.689425 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" podStartSLOduration=4.389417509 podStartE2EDuration="12.689416208s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.404855489 +0000 UTC m=+1012.380034070" lastFinishedPulling="2025-10-07 15:05:43.704854188 +0000 UTC m=+1020.680032769" observedRunningTime="2025-10-07 15:05:45.654413983 +0000 UTC m=+1022.629592564" watchObservedRunningTime="2025-10-07 15:05:45.689416208 +0000 UTC m=+1022.664594789" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.693252 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" event={"ID":"8366fa08-0b1b-49f3-8ac1-7df869356e24","Type":"ContainerStarted","Data":"368d5675ef2cc66d1de4597cf820b7df5e4ec701e79beeb7bd25d522aed08a85"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.693287 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" event={"ID":"8366fa08-0b1b-49f3-8ac1-7df869356e24","Type":"ContainerStarted","Data":"518660cacbdf6476b5c6b3444690af115daa2bea40bc60de66da021532fb0ad2"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.693827 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.706821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" 
event={"ID":"51779ddc-1795-45cb-8ba8-8ac78b2c43c8","Type":"ContainerStarted","Data":"0c48a25dbfbbaae6c58da5645c0f5f500e09edf50c0ed8d224b5fa4801ded0a0"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.706863 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" event={"ID":"51779ddc-1795-45cb-8ba8-8ac78b2c43c8","Type":"ContainerStarted","Data":"915dfb7b1e2c0585395d17657a46db9e6e0b64be40b85ad15c925b17af23d383"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.707127 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.710474 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" event={"ID":"58058dbe-db1a-41b4-8643-21f790efaac3","Type":"ContainerStarted","Data":"769af1e7a9b441e29799410ac7d3012dc01603b409beefc245b2d589df5729a4"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.710637 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.712724 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" podStartSLOduration=3.7535836270000003 podStartE2EDuration="12.712710887s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.715255483 +0000 UTC m=+1011.690434064" lastFinishedPulling="2025-10-07 15:05:43.674382733 +0000 UTC m=+1020.649561324" observedRunningTime="2025-10-07 15:05:45.709573857 +0000 UTC m=+1022.684752458" watchObservedRunningTime="2025-10-07 15:05:45.712710887 +0000 UTC m=+1022.687889478" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.713609 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" event={"ID":"fba2eb6a-4cb0-4fc9-9625-e7a57382e412","Type":"ContainerStarted","Data":"470cacd73d0ed2bca3085e9d35468e0a20376b571eb879ff5bc2f00b92292ac4"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.713656 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" event={"ID":"fba2eb6a-4cb0-4fc9-9625-e7a57382e412","Type":"ContainerStarted","Data":"9f44cc2c8abcfe20c9043efdff7a338577fdcc1f176deda71536eadaf576b2cd"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.714255 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.716327 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" event={"ID":"a70458ed-18c5-49ef-8e30-83e39c3ec5e5","Type":"ContainerStarted","Data":"7fc11725c4944aa0cacb0ae4cf37711c4c8dd511ff638c1a267d896abbeaa7ec"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.716812 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.718462 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" event={"ID":"a0710ae8-d5bc-4f95-a4ec-76128a3916bb","Type":"ContainerStarted","Data":"f2606849fc598e1faa0363ed291879d5fa0faba4f001ecd69794d6ad5cd00765"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.719093 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.720376 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" event={"ID":"b5f8d795-a31d-4992-99fc-590848eae6fd","Type":"ContainerStarted","Data":"b79a303afddcf019a7265b73d2c9f471d22a51f2a728706b3bc93375f9709dd6"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.721032 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.726544 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" event={"ID":"8c3d7854-7f93-46f3-aa4c-1c26dc987cbe","Type":"ContainerStarted","Data":"0279e7fd37a7af35f71c5b5a4f2eaac0eff72bd1e641c8df3db19de456e9713b"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.727326 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.731110 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" event={"ID":"764c99ab-d28b-4a93-b2e6-5abdef46cde8","Type":"ContainerStarted","Data":"71f8d1bbabb67100833d2ab8adc2a7776f3b3241919130ae37fca44a75c069db"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.731485 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" podStartSLOduration=4.662735449 podStartE2EDuration="12.731472596s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.563920637 +0000 UTC m=+1012.539099218" lastFinishedPulling="2025-10-07 15:05:43.632657774 +0000 UTC m=+1020.607836365" observedRunningTime="2025-10-07 15:05:45.730699864 +0000 UTC m=+1022.705878445" watchObservedRunningTime="2025-10-07 15:05:45.731472596 +0000 UTC m=+1022.706651177" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.731756 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.740142 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" event={"ID":"20bd297b-c47a-4b56-9581-4b4699b7d1d4","Type":"ContainerStarted","Data":"fdd6df0eb2b334f9ae71472fcfb46f1a9a4740895fbba73bb15adc12d525b0be"} Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.740723 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.754570 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" 
podStartSLOduration=3.724533873 podStartE2EDuration="12.754556149s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.636231503 +0000 UTC m=+1011.611410074" lastFinishedPulling="2025-10-07 15:05:43.666253769 +0000 UTC m=+1020.641432350" observedRunningTime="2025-10-07 15:05:45.75252271 +0000 UTC m=+1022.727701311" watchObservedRunningTime="2025-10-07 15:05:45.754556149 +0000 UTC m=+1022.729734730" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.819209 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" podStartSLOduration=5.148196712 podStartE2EDuration="12.819184725s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:36.058849852 +0000 UTC m=+1013.034028443" lastFinishedPulling="2025-10-07 15:05:43.729837875 +0000 UTC m=+1020.705016456" observedRunningTime="2025-10-07 15:05:45.79081404 +0000 UTC m=+1022.765992631" watchObservedRunningTime="2025-10-07 15:05:45.819184725 +0000 UTC m=+1022.794363306" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.822152 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" podStartSLOduration=4.496083493 podStartE2EDuration="12.82214229s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.412722985 +0000 UTC m=+1012.387901566" lastFinishedPulling="2025-10-07 15:05:43.738781782 +0000 UTC m=+1020.713960363" observedRunningTime="2025-10-07 15:05:45.817842827 +0000 UTC m=+1022.793021418" watchObservedRunningTime="2025-10-07 15:05:45.82214229 +0000 UTC m=+1022.797320871" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.843342 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" podStartSLOduration=3.508064196 podStartE2EDuration="12.843320218s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.37741433 +0000 UTC m=+1011.352592911" lastFinishedPulling="2025-10-07 15:05:43.712670352 +0000 UTC m=+1020.687848933" observedRunningTime="2025-10-07 15:05:45.842172945 +0000 UTC m=+1022.817351526" watchObservedRunningTime="2025-10-07 15:05:45.843320218 +0000 UTC m=+1022.818498799" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.861190 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" podStartSLOduration=4.440172738 podStartE2EDuration="12.861172421s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.253335578 +0000 UTC m=+1012.228514159" lastFinishedPulling="2025-10-07 15:05:43.674335261 +0000 UTC m=+1020.649513842" observedRunningTime="2025-10-07 15:05:45.858995539 +0000 UTC m=+1022.834174130" watchObservedRunningTime="2025-10-07 15:05:45.861172421 +0000 UTC m=+1022.836351002" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.885209 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" podStartSLOduration=4.59344299 podStartE2EDuration="12.885192841s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.413635332 +0000 UTC m=+1012.388813913" lastFinishedPulling="2025-10-07 
15:05:43.705385183 +0000 UTC m=+1020.680563764" observedRunningTime="2025-10-07 15:05:45.880954659 +0000 UTC m=+1022.856133240" watchObservedRunningTime="2025-10-07 15:05:45.885192841 +0000 UTC m=+1022.860371422" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.901714 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" podStartSLOduration=3.576520102 podStartE2EDuration="12.901699475s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.308219462 +0000 UTC m=+1011.283398043" lastFinishedPulling="2025-10-07 15:05:43.633398835 +0000 UTC m=+1020.608577416" observedRunningTime="2025-10-07 15:05:45.899919984 +0000 UTC m=+1022.875098575" watchObservedRunningTime="2025-10-07 15:05:45.901699475 +0000 UTC m=+1022.876878056" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.925849 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" podStartSLOduration=4.466545395 podStartE2EDuration="12.925828058s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.244429222 +0000 UTC m=+1012.219607803" lastFinishedPulling="2025-10-07 15:05:43.703711895 +0000 UTC m=+1020.678890466" observedRunningTime="2025-10-07 15:05:45.922788541 +0000 UTC m=+1022.897967132" watchObservedRunningTime="2025-10-07 15:05:45.925828058 +0000 UTC m=+1022.901006639" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.980101 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" podStartSLOduration=4.535911708 podStartE2EDuration="12.980083837s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.222622986 +0000 UTC m=+1012.197801567" lastFinishedPulling="2025-10-07 15:05:43.666795105 +0000 UTC m=+1020.641973696" observedRunningTime="2025-10-07 15:05:45.978427019 +0000 UTC m=+1022.953605600" watchObservedRunningTime="2025-10-07 15:05:45.980083837 +0000 UTC m=+1022.955262408" Oct 07 15:05:45 crc kubenswrapper[4672]: I1007 15:05:45.980508 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" podStartSLOduration=12.980503459 podStartE2EDuration="12.980503459s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:05:45.960954097 +0000 UTC m=+1022.936132688" watchObservedRunningTime="2025-10-07 15:05:45.980503459 +0000 UTC m=+1022.955682030" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.750462 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" event={"ID":"58058dbe-db1a-41b4-8643-21f790efaac3","Type":"ContainerStarted","Data":"fc4e98448668a0309be28b73d8cf998cc4c54d1639fae0330a99b0499571770c"} Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.752272 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" event={"ID":"d427841d-eba8-45b1-aa18-de4a5d1fecaa","Type":"ContainerStarted","Data":"298f5f4997bc7042a37dee9e0e6d697cd0852c4da7f875ea5f69d4018f0f907b"} Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 
15:05:46.752319 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.754321 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" event={"ID":"00724d85-8a20-4114-9c19-10171b42d9d1","Type":"ContainerStarted","Data":"2349db537e04fdc586fb8ddd85593fc7a055ae3a85fbaa73403d3439827d6366"} Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.754475 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.756515 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" event={"ID":"255dad32-3ed4-49eb-8e4d-6cc40d83acc7","Type":"ContainerStarted","Data":"c940fc4226f858ae861ac950560ff86ed5b03fafecf664cae14b88eaade1851b"} Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.756591 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.758607 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" event={"ID":"24fa96bf-c94d-4e2c-974a-d00f03de100d","Type":"ContainerStarted","Data":"5fd931762c4f3d2bd3d9b70e37afa714753ea9fe2d7ee2c56f610f35fe3ec454"} Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.781665 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" podStartSLOduration=4.681630512 podStartE2EDuration="13.781638278s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:34.533376339 +0000 UTC m=+1011.508554920" lastFinishedPulling="2025-10-07 15:05:43.633384085 +0000 UTC m=+1020.608562686" observedRunningTime="2025-10-07 15:05:46.772162145 +0000 UTC m=+1023.747340736" watchObservedRunningTime="2025-10-07 15:05:46.781638278 +0000 UTC m=+1023.756816869" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.796526 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" podStartSLOduration=5.298205892 podStartE2EDuration="13.796505095s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.208284694 +0000 UTC m=+1012.183463275" lastFinishedPulling="2025-10-07 15:05:43.706583897 +0000 UTC m=+1020.681762478" observedRunningTime="2025-10-07 15:05:46.791324056 +0000 UTC m=+1023.766502647" watchObservedRunningTime="2025-10-07 15:05:46.796505095 +0000 UTC m=+1023.771683666" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.812329 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" podStartSLOduration=5.377796407 podStartE2EDuration="13.812291268s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.239931153 +0000 UTC m=+1012.215109734" lastFinishedPulling="2025-10-07 15:05:43.674426014 +0000 UTC m=+1020.649604595" observedRunningTime="2025-10-07 15:05:46.807900452 +0000 UTC m=+1023.783079033" 
watchObservedRunningTime="2025-10-07 15:05:46.812291268 +0000 UTC m=+1023.787469909" Oct 07 15:05:46 crc kubenswrapper[4672]: I1007 15:05:46.825188 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" podStartSLOduration=5.432524969 podStartE2EDuration="13.825172968s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.240076507 +0000 UTC m=+1012.215255088" lastFinishedPulling="2025-10-07 15:05:43.632724506 +0000 UTC m=+1020.607903087" observedRunningTime="2025-10-07 15:05:46.823202181 +0000 UTC m=+1023.798380812" watchObservedRunningTime="2025-10-07 15:05:46.825172968 +0000 UTC m=+1023.800351549" Oct 07 15:05:47 crc kubenswrapper[4672]: I1007 15:05:47.766571 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" Oct 07 15:05:49 crc kubenswrapper[4672]: I1007 15:05:49.778740 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" event={"ID":"c5d2b5d0-7471-41e8-a5f9-7930a07fb483","Type":"ContainerStarted","Data":"4e6c7a36a8c7f95d638b4f864fe0512cfa87b64696c6dffab1cad63d7e5249a6"} Oct 07 15:05:49 crc kubenswrapper[4672]: I1007 15:05:49.780229 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" Oct 07 15:05:49 crc kubenswrapper[4672]: I1007 15:05:49.799945 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" podStartSLOduration=2.933652227 podStartE2EDuration="16.799924458s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.415885556 +0000 UTC m=+1012.391064127" lastFinishedPulling="2025-10-07 15:05:49.282157777 +0000 UTC m=+1026.257336358" observedRunningTime="2025-10-07 15:05:49.792914977 +0000 UTC m=+1026.768093598" watchObservedRunningTime="2025-10-07 15:05:49.799924458 +0000 UTC m=+1026.775103059" Oct 07 15:05:51 crc kubenswrapper[4672]: I1007 15:05:51.794962 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" event={"ID":"58c50b3b-c174-42d4-bcc1-d76b0a93cd58","Type":"ContainerStarted","Data":"1a5c6425d76d4252b55da325540cfa2d877c47c45a4509af5886f312b2cbdd92"} Oct 07 15:05:51 crc kubenswrapper[4672]: I1007 15:05:51.795509 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" Oct 07 15:05:51 crc kubenswrapper[4672]: I1007 15:05:51.814085 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" podStartSLOduration=3.199510032 podStartE2EDuration="18.814064557s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.563926537 +0000 UTC m=+1012.539105118" lastFinishedPulling="2025-10-07 15:05:51.178481062 +0000 UTC m=+1028.153659643" observedRunningTime="2025-10-07 15:05:51.813677996 +0000 UTC m=+1028.788856577" watchObservedRunningTime="2025-10-07 15:05:51.814064557 +0000 UTC m=+1028.789243138" Oct 07 15:05:52 crc kubenswrapper[4672]: I1007 15:05:52.805349 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" event={"ID":"9c7d5ea5-33a0-4006-b116-8cba83443c79","Type":"ContainerStarted","Data":"e92be51f1e5815736930383501966cbba59b827dd5d699aa62351b8857416500"} Oct 07 15:05:52 crc kubenswrapper[4672]: I1007 15:05:52.805894 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" Oct 07 15:05:52 crc kubenswrapper[4672]: I1007 15:05:52.828119 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" podStartSLOduration=3.120440482 podStartE2EDuration="19.828101962s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.57480735 +0000 UTC m=+1012.549985921" lastFinishedPulling="2025-10-07 15:05:52.28246882 +0000 UTC m=+1029.257647401" observedRunningTime="2025-10-07 15:05:52.823717536 +0000 UTC m=+1029.798896167" watchObservedRunningTime="2025-10-07 15:05:52.828101962 +0000 UTC m=+1029.803280543" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.514480 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-hvp85" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.533726 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-jnwxm" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.551547 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-l4zjl" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.574414 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-2mddh" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.604437 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-hbh5b" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.638258 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-m7vrt" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.672742 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-649675d675-598db" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.782374 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-72pgl" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.818838 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" event={"ID":"2815da8e-e8ee-45b7-a971-b36721ba4322","Type":"ContainerStarted","Data":"7d6c7668a303302a6133f22c6ad7df6840290a4d53dc1fb64222537d4ef26789"} Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.837679 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-j8bct" podStartSLOduration=2.112923404 podStartE2EDuration="19.837657257s" podCreationTimestamp="2025-10-07 15:05:34 +0000 UTC" firstStartedPulling="2025-10-07 
15:05:35.56438422 +0000 UTC m=+1012.539562801" lastFinishedPulling="2025-10-07 15:05:53.289118073 +0000 UTC m=+1030.264296654" observedRunningTime="2025-10-07 15:05:53.834169397 +0000 UTC m=+1030.809347978" watchObservedRunningTime="2025-10-07 15:05:53.837657257 +0000 UTC m=+1030.812835838" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.911296 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-44jhh" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.924853 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-l2zj6" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.949865 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq" Oct 07 15:05:53 crc kubenswrapper[4672]: I1007 15:05:53.993499 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-9rcgh" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.026177 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-w89qd" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.095656 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-g8n44" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.121765 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-xzhc4" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.140195 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9zgt5" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.209997 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-2z4fk" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.826966 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" event={"ID":"2186b39e-fac1-49ed-a0d3-d925a4a7c2e6","Type":"ContainerStarted","Data":"b0848292d25e55e62cb8dbff8ba194ea92677d3e8bc713f8f5581b2b861e6804"} Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.827200 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:05:54 crc kubenswrapper[4672]: I1007 15:05:54.846316 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" podStartSLOduration=2.611486154 podStartE2EDuration="21.846295497s" podCreationTimestamp="2025-10-07 15:05:33 +0000 UTC" firstStartedPulling="2025-10-07 15:05:35.415980639 +0000 UTC m=+1012.391159220" lastFinishedPulling="2025-10-07 15:05:54.650789982 +0000 UTC m=+1031.625968563" observedRunningTime="2025-10-07 15:05:54.845290678 +0000 UTC m=+1031.820469259" watchObservedRunningTime="2025-10-07 15:05:54.846295497 +0000 UTC m=+1031.821474078" Oct 07 15:05:55 crc kubenswrapper[4672]: I1007 15:05:55.545960 4672 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46" Oct 07 15:05:56 crc kubenswrapper[4672]: I1007 15:05:56.018613 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-86dbb8967f-5bbbt" Oct 07 15:06:04 crc kubenswrapper[4672]: I1007 15:06:04.152130 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-sfdxq" Oct 07 15:06:04 crc kubenswrapper[4672]: I1007 15:06:04.299699 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-8f4wl" Oct 07 15:06:04 crc kubenswrapper[4672]: I1007 15:06:04.459717 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-27xdb" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.292130 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.293909 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.297513 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.298228 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.299242 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-wzkc9" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.299467 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.299581 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.371852 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.374413 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.381724 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.388055 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xxz2\" (UniqueName: \"kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.388116 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.389082 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.489654 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25lw9\" (UniqueName: \"kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.489705 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.489914 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xxz2\" (UniqueName: \"kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.490078 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.490130 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.491002 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 
15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.506602 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xxz2\" (UniqueName: \"kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2\") pod \"dnsmasq-dns-67f8579c9-dwxzf\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.591165 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25lw9\" (UniqueName: \"kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.591228 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.591295 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.592293 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.592295 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.608752 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25lw9\" (UniqueName: \"kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9\") pod \"dnsmasq-dns-5468b776f7-95s9w\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.631245 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:22 crc kubenswrapper[4672]: I1007 15:06:22.691979 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:23 crc kubenswrapper[4672]: I1007 15:06:23.052636 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:23 crc kubenswrapper[4672]: I1007 15:06:23.061535 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:06:23 crc kubenswrapper[4672]: W1007 15:06:23.286092 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cecc16_036b_451f_a074_faee5c5bcb73.slice/crio-3478c956364a7b378ddd060737c080142c2cee5cb5d8169000e72cfd3cebfad3 WatchSource:0}: Error finding container 3478c956364a7b378ddd060737c080142c2cee5cb5d8169000e72cfd3cebfad3: Status 404 returned error can't find the container with id 3478c956364a7b378ddd060737c080142c2cee5cb5d8169000e72cfd3cebfad3 Oct 07 15:06:23 crc kubenswrapper[4672]: I1007 15:06:23.287860 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:24 crc kubenswrapper[4672]: I1007 15:06:24.015199 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" event={"ID":"38cecc16-036b-451f-a074-faee5c5bcb73","Type":"ContainerStarted","Data":"3478c956364a7b378ddd060737c080142c2cee5cb5d8169000e72cfd3cebfad3"} Oct 07 15:06:24 crc kubenswrapper[4672]: I1007 15:06:24.016615 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" event={"ID":"b8f07978-c49f-4c4d-919b-e46edeedc3fa","Type":"ContainerStarted","Data":"e03e69e15288907c7f9812021fd25c15aad836e2f9e1d3e9a718514365100724"} Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.242974 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.266483 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.267749 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.278244 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.430305 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h7mx\" (UniqueName: \"kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.430369 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.430413 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.531859 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h7mx\" (UniqueName: \"kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.531949 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.532069 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.533385 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.533525 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.545290 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.561779 
4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h7mx\" (UniqueName: \"kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx\") pod \"dnsmasq-dns-5c5bc7f59c-wl9pc\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.568067 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.569799 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.584252 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.590230 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.742417 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.742472 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t6kk\" (UniqueName: \"kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.742512 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.843894 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.844692 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t6kk\" (UniqueName: \"kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.844740 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.845007 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.846150 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.891999 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t6kk\" (UniqueName: \"kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk\") pod \"dnsmasq-dns-5b857bcbc9-w2drn\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:25 crc kubenswrapper[4672]: I1007 15:06:25.917577 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.144680 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:26 crc kubenswrapper[4672]: W1007 15:06:26.154059 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99b7e18e_95f8_452c_a4cb_e59b802b309e.slice/crio-ab99c3cd6d80aa32f8ae7529315c6d801965462e3652121bdef93fdb983ee44e WatchSource:0}: Error finding container ab99c3cd6d80aa32f8ae7529315c6d801965462e3652121bdef93fdb983ee44e: Status 404 returned error can't find the container with id ab99c3cd6d80aa32f8ae7529315c6d801965462e3652121bdef93fdb983ee44e Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.389490 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.396544 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.398073 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.399990 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.400280 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.400405 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.401403 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.401574 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-q24n8" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.404393 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.404606 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.413225 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.558825 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.558879 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.558900 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.558920 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.558972 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559001 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559150 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559178 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwjjb\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559228 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559246 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.559268 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660755 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660856 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwjjb\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660896 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660917 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660945 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.660981 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.661032 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.661058 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.661088 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.661140 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.661173 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.662000 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.662308 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.662535 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.662732 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.663753 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.665686 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.667653 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.668901 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.669512 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.707629 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.711252 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.712590 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.713718 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.719732 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.719810 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5nz5c" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.719985 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.720112 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.720217 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.720337 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.720381 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.725796 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwjjb\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb\") pod \"rabbitmq-cell1-server-0\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.730843 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863546 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863644 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863668 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863690 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863719 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wplcs\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863738 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863802 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863861 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863879 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863902 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.863923 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965086 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965133 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965156 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965178 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965206 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965221 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965239 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965257 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965283 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wplcs\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965302 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.965323 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.966452 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" 
(UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.966909 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.967001 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.967249 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.967579 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.969778 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.970515 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.974628 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.984468 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wplcs\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:26 crc kubenswrapper[4672]: I1007 15:06:26.990831 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.000048 4672 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.001397 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " pod="openstack/rabbitmq-server-0" Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.025340 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.048177 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" event={"ID":"99b7e18e-95f8-452c-a4cb-e59b802b309e","Type":"ContainerStarted","Data":"ab99c3cd6d80aa32f8ae7529315c6d801965462e3652121bdef93fdb983ee44e"} Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.050893 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" event={"ID":"e16c46b7-1761-476e-abd2-1cee19f91e63","Type":"ContainerStarted","Data":"b48aac42d3ff72d69f2a933bcbd308f66d594bf96ed5f4823163b3a33a0885f4"} Oct 07 15:06:27 crc kubenswrapper[4672]: I1007 15:06:27.074648 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.435105 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.437309 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.443444 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.443614 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.443647 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-dppb9" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.444529 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.445619 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.448350 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.458327 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.593986 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.608222 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.608461 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.611618 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.611938 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-r4tcf" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.612221 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.612343 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620663 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-secrets\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620705 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620738 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620763 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620779 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk9q2\" (UniqueName: \"kubernetes.io/projected/c3168983-5af6-4e27-a5f0-23c80d627c0a-kube-api-access-dk9q2\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620802 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.620839 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.621815 4672 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.621836 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726545 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726622 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726658 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-secrets\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726674 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726695 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfzq4\" (UniqueName: \"kubernetes.io/projected/6331c4be-e7ca-4786-9ac8-3aac826906e0-kube-api-access-rfzq4\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726712 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726731 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726747 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726763 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726786 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk9q2\" (UniqueName: \"kubernetes.io/projected/c3168983-5af6-4e27-a5f0-23c80d627c0a-kube-api-access-dk9q2\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726806 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726823 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726853 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726873 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726953 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.728376 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.726972 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.729388 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.729444 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.730682 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.730926 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.731681 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3168983-5af6-4e27-a5f0-23c80d627c0a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.731716 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3168983-5af6-4e27-a5f0-23c80d627c0a-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.737879 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-secrets\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.743844 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.750444 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk9q2\" (UniqueName: \"kubernetes.io/projected/c3168983-5af6-4e27-a5f0-23c80d627c0a-kube-api-access-dk9q2\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: 
I1007 15:06:29.752048 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3168983-5af6-4e27-a5f0-23c80d627c0a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.759531 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"c3168983-5af6-4e27-a5f0-23c80d627c0a\") " pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.769645 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.804782 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.806229 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.817588 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.817615 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-hwpt4" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.818218 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.827002 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849410 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849456 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-memcached-tls-certs\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849477 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849491 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-config-data\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849549 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849572 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849589 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849604 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vr7n\" (UniqueName: \"kubernetes.io/projected/09d93eef-fed4-4023-80a4-8927b2631580-kube-api-access-6vr7n\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849626 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-combined-ca-bundle\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849663 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849689 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-kolla-config\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849711 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849746 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.849794 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfzq4\" (UniqueName: \"kubernetes.io/projected/6331c4be-e7ca-4786-9ac8-3aac826906e0-kube-api-access-rfzq4\") pod 
\"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.851175 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.854388 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.854729 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.856347 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.859476 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6331c4be-e7ca-4786-9ac8-3aac826906e0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.861662 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.864819 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.865317 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6331c4be-e7ca-4786-9ac8-3aac826906e0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.878724 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfzq4\" (UniqueName: \"kubernetes.io/projected/6331c4be-e7ca-4786-9ac8-3aac826906e0-kube-api-access-rfzq4\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc 
kubenswrapper[4672]: I1007 15:06:29.889199 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6331c4be-e7ca-4786-9ac8-3aac826906e0\") " pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.926639 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.950948 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-memcached-tls-certs\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.951004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-config-data\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.951083 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vr7n\" (UniqueName: \"kubernetes.io/projected/09d93eef-fed4-4023-80a4-8927b2631580-kube-api-access-6vr7n\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.951114 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-combined-ca-bundle\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.951217 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-kolla-config\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.952509 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-config-data\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.952575 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/09d93eef-fed4-4023-80a4-8927b2631580-kolla-config\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.954384 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-memcached-tls-certs\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.965308 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/09d93eef-fed4-4023-80a4-8927b2631580-combined-ca-bundle\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:29 crc kubenswrapper[4672]: I1007 15:06:29.967589 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vr7n\" (UniqueName: \"kubernetes.io/projected/09d93eef-fed4-4023-80a4-8927b2631580-kube-api-access-6vr7n\") pod \"memcached-0\" (UID: \"09d93eef-fed4-4023-80a4-8927b2631580\") " pod="openstack/memcached-0" Oct 07 15:06:30 crc kubenswrapper[4672]: I1007 15:06:30.148659 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.572374 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.573749 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.577511 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-bk2m9" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.581794 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.674832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mnvg\" (UniqueName: \"kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg\") pod \"kube-state-metrics-0\" (UID: \"7f89db03-61a3-4391-aa60-fc306eb36c31\") " pod="openstack/kube-state-metrics-0" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.775937 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mnvg\" (UniqueName: \"kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg\") pod \"kube-state-metrics-0\" (UID: \"7f89db03-61a3-4391-aa60-fc306eb36c31\") " pod="openstack/kube-state-metrics-0" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.794922 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mnvg\" (UniqueName: \"kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg\") pod \"kube-state-metrics-0\" (UID: \"7f89db03-61a3-4391-aa60-fc306eb36c31\") " pod="openstack/kube-state-metrics-0" Oct 07 15:06:31 crc kubenswrapper[4672]: I1007 15:06:31.900277 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.715107 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-tzsfl"] Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.716610 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.722057 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.722057 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-7skw8" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.722058 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.728820 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-tzsfl"] Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.735422 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5p5sl"] Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.737352 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.756588 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5p5sl"] Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835573 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-log-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835629 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-log\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835661 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835862 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835907 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73afc1bf-c6d8-4d61-b050-db75d0cd219f-scripts\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835940 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-ovn-controller-tls-certs\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " 
pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.835986 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-etc-ovs\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836055 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-combined-ca-bundle\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836184 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d1227-361f-4c04-b0ce-12295f021364-scripts\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836211 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-lib\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836230 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6dvn\" (UniqueName: \"kubernetes.io/projected/73afc1bf-c6d8-4d61-b050-db75d0cd219f-kube-api-access-q6dvn\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836262 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-run\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.836362 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ggln\" (UniqueName: \"kubernetes.io/projected/0e4d1227-361f-4c04-b0ce-12295f021364-kube-api-access-2ggln\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938052 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ggln\" (UniqueName: \"kubernetes.io/projected/0e4d1227-361f-4c04-b0ce-12295f021364-kube-api-access-2ggln\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938140 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-log-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 
15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-log\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938206 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938246 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938268 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73afc1bf-c6d8-4d61-b050-db75d0cd219f-scripts\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938294 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-ovn-controller-tls-certs\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938319 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-etc-ovs\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938344 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-combined-ca-bundle\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938393 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d1227-361f-4c04-b0ce-12295f021364-scripts\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938414 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-lib\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938434 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6dvn\" (UniqueName: 
\"kubernetes.io/projected/73afc1bf-c6d8-4d61-b050-db75d0cd219f-kube-api-access-q6dvn\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938454 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-run\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.938922 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-run\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.939378 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-log-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.939494 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-log\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.939544 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.939627 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0e4d1227-361f-4c04-b0ce-12295f021364-var-run-ovn\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.941889 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73afc1bf-c6d8-4d61-b050-db75d0cd219f-scripts\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.942220 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-var-lib\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.942341 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/73afc1bf-c6d8-4d61-b050-db75d0cd219f-etc-ovs\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.943774 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d1227-361f-4c04-b0ce-12295f021364-scripts\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.946514 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-combined-ca-bundle\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.948867 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e4d1227-361f-4c04-b0ce-12295f021364-ovn-controller-tls-certs\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.962609 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6dvn\" (UniqueName: \"kubernetes.io/projected/73afc1bf-c6d8-4d61-b050-db75d0cd219f-kube-api-access-q6dvn\") pod \"ovn-controller-ovs-5p5sl\" (UID: \"73afc1bf-c6d8-4d61-b050-db75d0cd219f\") " pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:35 crc kubenswrapper[4672]: I1007 15:06:35.973882 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ggln\" (UniqueName: \"kubernetes.io/projected/0e4d1227-361f-4c04-b0ce-12295f021364-kube-api-access-2ggln\") pod \"ovn-controller-tzsfl\" (UID: \"0e4d1227-361f-4c04-b0ce-12295f021364\") " pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.034383 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.052617 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.619561 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.621697 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.631115 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.635290 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.635623 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.635940 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-djn6f" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.636247 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.637461 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755301 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755419 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755447 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-config\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755481 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755551 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755595 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnln4\" (UniqueName: \"kubernetes.io/projected/6a4f76b5-fd91-462b-9e85-9c66d83ab353-kube-api-access-rnln4\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755649 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.755678 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.857887 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.857968 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858010 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858065 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-config\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858090 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858168 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858192 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnln4\" (UniqueName: \"kubernetes.io/projected/6a4f76b5-fd91-462b-9e85-9c66d83ab353-kube-api-access-rnln4\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.858224 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: 
I1007 15:06:36.858553 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.859211 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.859719 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-config\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.863609 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6a4f76b5-fd91-462b-9e85-9c66d83ab353-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.865903 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.867012 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.870579 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a4f76b5-fd91-462b-9e85-9c66d83ab353-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.879046 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnln4\" (UniqueName: \"kubernetes.io/projected/6a4f76b5-fd91-462b-9e85-9c66d83ab353-kube-api-access-rnln4\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.889132 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6a4f76b5-fd91-462b-9e85-9c66d83ab353\") " pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:36 crc kubenswrapper[4672]: I1007 15:06:36.944477 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.789141 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.789486 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.789619 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7xxz2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-67f8579c9-dwxzf_openstack(b8f07978-c49f-4c4d-919b-e46edeedc3fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.791249 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" podUID="b8f07978-c49f-4c4d-919b-e46edeedc3fa" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.843003 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.843115 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.843250 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-neutron-server:b78cfc68a577b1553523c8a70a34e297,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-25lw9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5468b776f7-95s9w_openstack(38cecc16-036b-451f-a074-faee5c5bcb73): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 15:06:38 crc kubenswrapper[4672]: E1007 15:06:38.844455 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" podUID="38cecc16-036b-451f-a074-faee5c5bcb73" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.258343 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.261825 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.264395 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.264601 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.264896 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.265757 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ntc5z" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.282916 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.306177 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.330268 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407240 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c6rb\" (UniqueName: \"kubernetes.io/projected/16ba9419-35b8-45df-bf64-86d144d67284-kube-api-access-9c6rb\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407292 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407321 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-config\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407362 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407379 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407402 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc 
kubenswrapper[4672]: I1007 15:06:39.407420 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.407441 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/16ba9419-35b8-45df-bf64-86d144d67284-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.509830 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.509901 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.509939 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.510032 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.510068 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/16ba9419-35b8-45df-bf64-86d144d67284-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.510191 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c6rb\" (UniqueName: \"kubernetes.io/projected/16ba9419-35b8-45df-bf64-86d144d67284-kube-api-access-9c6rb\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.510250 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.510292 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-config\") pod 
\"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.512552 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-config\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.513413 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/16ba9419-35b8-45df-bf64-86d144d67284-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.514144 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16ba9419-35b8-45df-bf64-86d144d67284-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.515317 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.515738 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.516770 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.517905 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/16ba9419-35b8-45df-bf64-86d144d67284-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.539064 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.542660 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c6rb\" (UniqueName: \"kubernetes.io/projected/16ba9419-35b8-45df-bf64-86d144d67284-kube-api-access-9c6rb\") pod \"ovsdbserver-sb-0\" (UID: \"16ba9419-35b8-45df-bf64-86d144d67284\") " pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.584030 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.606322 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.606584 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.632903 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: W1007 15:06:39.646314 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1afcb6ce_1241_4930_8639_bee8a9a76d11.slice/crio-3440494c5ecdc57072ba6c33aa923bf8b54b6824b1bcb73f3522fe2eba8842ce WatchSource:0}: Error finding container 3440494c5ecdc57072ba6c33aa923bf8b54b6824b1bcb73f3522fe2eba8842ce: Status 404 returned error can't find the container with id 3440494c5ecdc57072ba6c33aa923bf8b54b6824b1bcb73f3522fe2eba8842ce Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.656492 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.703723 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.715727 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25lw9\" (UniqueName: \"kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9\") pod \"38cecc16-036b-451f-a074-faee5c5bcb73\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.715809 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xxz2\" (UniqueName: \"kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2\") pod \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.715867 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config\") pod \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\" (UID: \"b8f07978-c49f-4c4d-919b-e46edeedc3fa\") " Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.715918 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config\") pod \"38cecc16-036b-451f-a074-faee5c5bcb73\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.715942 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc\") pod \"38cecc16-036b-451f-a074-faee5c5bcb73\" (UID: \"38cecc16-036b-451f-a074-faee5c5bcb73\") " Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.717517 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38cecc16-036b-451f-a074-faee5c5bcb73" (UID: "38cecc16-036b-451f-a074-faee5c5bcb73"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.717965 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config" (OuterVolumeSpecName: "config") pod "38cecc16-036b-451f-a074-faee5c5bcb73" (UID: "38cecc16-036b-451f-a074-faee5c5bcb73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.718231 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config" (OuterVolumeSpecName: "config") pod "b8f07978-c49f-4c4d-919b-e46edeedc3fa" (UID: "b8f07978-c49f-4c4d-919b-e46edeedc3fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.720220 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9" (OuterVolumeSpecName: "kube-api-access-25lw9") pod "38cecc16-036b-451f-a074-faee5c5bcb73" (UID: "38cecc16-036b-451f-a074-faee5c5bcb73"). InnerVolumeSpecName "kube-api-access-25lw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.724560 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2" (OuterVolumeSpecName: "kube-api-access-7xxz2") pod "b8f07978-c49f-4c4d-919b-e46edeedc3fa" (UID: "b8f07978-c49f-4c4d-919b-e46edeedc3fa"). InnerVolumeSpecName "kube-api-access-7xxz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.746942 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: W1007 15:06:39.756323 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a4f76b5_fd91_462b_9e85_9c66d83ab353.slice/crio-ce36b6dd5c8179d769e9477ca2f626ae50eb50f9ff5a897b14b16c8dead03789 WatchSource:0}: Error finding container ce36b6dd5c8179d769e9477ca2f626ae50eb50f9ff5a897b14b16c8dead03789: Status 404 returned error can't find the container with id ce36b6dd5c8179d769e9477ca2f626ae50eb50f9ff5a897b14b16c8dead03789 Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.819404 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25lw9\" (UniqueName: \"kubernetes.io/projected/38cecc16-036b-451f-a074-faee5c5bcb73-kube-api-access-25lw9\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.819454 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xxz2\" (UniqueName: \"kubernetes.io/projected/b8f07978-c49f-4c4d-919b-e46edeedc3fa-kube-api-access-7xxz2\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.819471 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f07978-c49f-4c4d-919b-e46edeedc3fa-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.819501 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-config\") on node 
\"crc\" DevicePath \"\"" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.819512 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38cecc16-036b-451f-a074-faee5c5bcb73-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.857039 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-tzsfl"] Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.866349 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:06:39 crc kubenswrapper[4672]: W1007 15:06:39.882381 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f89db03_61a3_4391_aa60_fc306eb36c31.slice/crio-f0c9e7d3e3c57c3acea9a54114dbeb6cb3c0b2ddd7fe24a3c1b02e62d093f662 WatchSource:0}: Error finding container f0c9e7d3e3c57c3acea9a54114dbeb6cb3c0b2ddd7fe24a3c1b02e62d093f662: Status 404 returned error can't find the container with id f0c9e7d3e3c57c3acea9a54114dbeb6cb3c0b2ddd7fe24a3c1b02e62d093f662 Oct 07 15:06:39 crc kubenswrapper[4672]: I1007 15:06:39.998107 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5p5sl"] Oct 07 15:06:40 crc kubenswrapper[4672]: W1007 15:06:40.002579 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73afc1bf_c6d8_4d61_b050_db75d0cd219f.slice/crio-5b01ebf4bb3491a5dd1d343a7ece6be7c2567de471116c7f877f899ae5eb0a4f WatchSource:0}: Error finding container 5b01ebf4bb3491a5dd1d343a7ece6be7c2567de471116c7f877f899ae5eb0a4f: Status 404 returned error can't find the container with id 5b01ebf4bb3491a5dd1d343a7ece6be7c2567de471116c7f877f899ae5eb0a4f Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.158080 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7f89db03-61a3-4391-aa60-fc306eb36c31","Type":"ContainerStarted","Data":"f0c9e7d3e3c57c3acea9a54114dbeb6cb3c0b2ddd7fe24a3c1b02e62d093f662"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.159174 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.159876 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67f8579c9-dwxzf" event={"ID":"b8f07978-c49f-4c4d-919b-e46edeedc3fa","Type":"ContainerDied","Data":"e03e69e15288907c7f9812021fd25c15aad836e2f9e1d3e9a718514365100724"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.161764 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerStarted","Data":"2c763752a776c90044662cd05f25905b0c965c8dd62a72cb18ac7ecbd817cf9e"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.166589 4672 generic.go:334] "Generic (PLEG): container finished" podID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerID="2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9" exitCode=0 Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.167445 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" event={"ID":"99b7e18e-95f8-452c-a4cb-e59b802b309e","Type":"ContainerDied","Data":"2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.170714 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5p5sl" event={"ID":"73afc1bf-c6d8-4d61-b050-db75d0cd219f","Type":"ContainerStarted","Data":"5b01ebf4bb3491a5dd1d343a7ece6be7c2567de471116c7f877f899ae5eb0a4f"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.173926 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"09d93eef-fed4-4023-80a4-8927b2631580","Type":"ContainerStarted","Data":"16c2e54f2e473539dda4ea3a8bc76033e2d610e818f5ac376d71a0cd65543c1e"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.176760 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerStarted","Data":"3440494c5ecdc57072ba6c33aa923bf8b54b6824b1bcb73f3522fe2eba8842ce"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.200865 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6331c4be-e7ca-4786-9ac8-3aac826906e0","Type":"ContainerStarted","Data":"c2617cc38dc9d272d57f879578126e5352f12c9a192f83ae8632e41fc2f7e29a"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.204138 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3168983-5af6-4e27-a5f0-23c80d627c0a","Type":"ContainerStarted","Data":"67c4090d404b0ee825832f07f73cb54967d2eb520aa497c9c98250783bc242b7"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.205179 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.208349 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" event={"ID":"38cecc16-036b-451f-a074-faee5c5bcb73","Type":"ContainerDied","Data":"3478c956364a7b378ddd060737c080142c2cee5cb5d8169000e72cfd3cebfad3"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.208458 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5468b776f7-95s9w" Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.211578 4672 generic.go:334] "Generic (PLEG): container finished" podID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerID="1e8764d484f9e9ce3249aab067e036d8431fe8363e55c586805e9778e7d2da72" exitCode=0 Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.211632 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" event={"ID":"e16c46b7-1761-476e-abd2-1cee19f91e63","Type":"ContainerDied","Data":"1e8764d484f9e9ce3249aab067e036d8431fe8363e55c586805e9778e7d2da72"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.219515 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6a4f76b5-fd91-462b-9e85-9c66d83ab353","Type":"ContainerStarted","Data":"ce36b6dd5c8179d769e9477ca2f626ae50eb50f9ff5a897b14b16c8dead03789"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.228511 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67f8579c9-dwxzf"] Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.234665 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl" event={"ID":"0e4d1227-361f-4c04-b0ce-12295f021364","Type":"ContainerStarted","Data":"7ea79f52a609a8e63c230913088b3c2639cf5d96d917885f906696f7082ae38e"} Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.266068 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 15:06:40 crc kubenswrapper[4672]: W1007 15:06:40.273715 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16ba9419_35b8_45df_bf64_86d144d67284.slice/crio-57ca407631a9d516348113760aeb77938fd663e357c50c9f8f385636886acac3 WatchSource:0}: Error finding container 57ca407631a9d516348113760aeb77938fd663e357c50c9f8f385636886acac3: Status 404 returned error can't find the container with id 57ca407631a9d516348113760aeb77938fd663e357c50c9f8f385636886acac3 Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.372087 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:40 crc kubenswrapper[4672]: I1007 15:06:40.384594 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5468b776f7-95s9w"] Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.245042 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"16ba9419-35b8-45df-bf64-86d144d67284","Type":"ContainerStarted","Data":"57ca407631a9d516348113760aeb77938fd663e357c50c9f8f385636886acac3"} Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.248808 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" event={"ID":"99b7e18e-95f8-452c-a4cb-e59b802b309e","Type":"ContainerStarted","Data":"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f"} Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.248893 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.253384 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" event={"ID":"e16c46b7-1761-476e-abd2-1cee19f91e63","Type":"ContainerStarted","Data":"8bf2cce72a61eaca5fd0e00b7825c6bcfac1370d79d0a5eaf0aecb88980cfce2"} Oct 07 
15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.253577 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.265891 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" podStartSLOduration=3.5469905219999998 podStartE2EDuration="16.265874468s" podCreationTimestamp="2025-10-07 15:06:25 +0000 UTC" firstStartedPulling="2025-10-07 15:06:26.157637986 +0000 UTC m=+1063.132816567" lastFinishedPulling="2025-10-07 15:06:38.876521932 +0000 UTC m=+1075.851700513" observedRunningTime="2025-10-07 15:06:41.265380163 +0000 UTC m=+1078.240558754" watchObservedRunningTime="2025-10-07 15:06:41.265874468 +0000 UTC m=+1078.241053049" Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.284692 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" podStartSLOduration=3.723673797 podStartE2EDuration="16.284675928s" podCreationTimestamp="2025-10-07 15:06:25 +0000 UTC" firstStartedPulling="2025-10-07 15:06:26.394389386 +0000 UTC m=+1063.369567967" lastFinishedPulling="2025-10-07 15:06:38.955391527 +0000 UTC m=+1075.930570098" observedRunningTime="2025-10-07 15:06:41.279366605 +0000 UTC m=+1078.254545186" watchObservedRunningTime="2025-10-07 15:06:41.284675928 +0000 UTC m=+1078.259854499" Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.902472 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38cecc16-036b-451f-a074-faee5c5bcb73" path="/var/lib/kubelet/pods/38cecc16-036b-451f-a074-faee5c5bcb73/volumes" Oct 07 15:06:41 crc kubenswrapper[4672]: I1007 15:06:41.903271 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8f07978-c49f-4c4d-919b-e46edeedc3fa" path="/var/lib/kubelet/pods/b8f07978-c49f-4c4d-919b-e46edeedc3fa/volumes" Oct 07 15:06:45 crc kubenswrapper[4672]: I1007 15:06:45.593280 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:45 crc kubenswrapper[4672]: I1007 15:06:45.919754 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:06:45 crc kubenswrapper[4672]: I1007 15:06:45.966518 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:46 crc kubenswrapper[4672]: I1007 15:06:46.289628 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="dnsmasq-dns" containerID="cri-o://ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f" gracePeriod=10 Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.067399 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.242629 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config\") pod \"99b7e18e-95f8-452c-a4cb-e59b802b309e\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.242962 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h7mx\" (UniqueName: \"kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx\") pod \"99b7e18e-95f8-452c-a4cb-e59b802b309e\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.242995 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc\") pod \"99b7e18e-95f8-452c-a4cb-e59b802b309e\" (UID: \"99b7e18e-95f8-452c-a4cb-e59b802b309e\") " Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.250220 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx" (OuterVolumeSpecName: "kube-api-access-2h7mx") pod "99b7e18e-95f8-452c-a4cb-e59b802b309e" (UID: "99b7e18e-95f8-452c-a4cb-e59b802b309e"). InnerVolumeSpecName "kube-api-access-2h7mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.288210 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config" (OuterVolumeSpecName: "config") pod "99b7e18e-95f8-452c-a4cb-e59b802b309e" (UID: "99b7e18e-95f8-452c-a4cb-e59b802b309e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.298358 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "99b7e18e-95f8-452c-a4cb-e59b802b309e" (UID: "99b7e18e-95f8-452c-a4cb-e59b802b309e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.300111 4672 generic.go:334] "Generic (PLEG): container finished" podID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerID="ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f" exitCode=0 Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.300175 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" event={"ID":"99b7e18e-95f8-452c-a4cb-e59b802b309e","Type":"ContainerDied","Data":"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f"} Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.300198 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.300226 4672 scope.go:117] "RemoveContainer" containerID="ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.300215 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5bc7f59c-wl9pc" event={"ID":"99b7e18e-95f8-452c-a4cb-e59b802b309e","Type":"ContainerDied","Data":"ab99c3cd6d80aa32f8ae7529315c6d801965462e3652121bdef93fdb983ee44e"} Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.305429 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5p5sl" event={"ID":"73afc1bf-c6d8-4d61-b050-db75d0cd219f","Type":"ContainerStarted","Data":"57cf9a3c2477deb5fae9cb38ba9d0bc52e04160429ee491aef41c6b60d7d09df"} Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.337153 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"09d93eef-fed4-4023-80a4-8927b2631580","Type":"ContainerStarted","Data":"4ce22be811761e74ba87dcd8d82be02f64eb8d21dadab413d3fb3cab7aeddcf6"} Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.337767 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.345511 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h7mx\" (UniqueName: \"kubernetes.io/projected/99b7e18e-95f8-452c-a4cb-e59b802b309e-kube-api-access-2h7mx\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.345558 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.345609 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b7e18e-95f8-452c-a4cb-e59b802b309e-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.360412 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.708163369 podStartE2EDuration="18.360389371s" podCreationTimestamp="2025-10-07 15:06:29 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.632267339 +0000 UTC m=+1076.607445920" lastFinishedPulling="2025-10-07 15:06:46.284493341 +0000 UTC m=+1083.259671922" observedRunningTime="2025-10-07 15:06:47.354667307 +0000 UTC m=+1084.329845888" watchObservedRunningTime="2025-10-07 15:06:47.360389371 +0000 UTC m=+1084.335567952" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.416168 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.424134 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5bc7f59c-wl9pc"] Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.748283 4672 scope.go:117] "RemoveContainer" containerID="2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.812770 4672 scope.go:117] "RemoveContainer" containerID="ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f" Oct 07 15:06:47 crc kubenswrapper[4672]: E1007 15:06:47.813396 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f\": container with ID starting with ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f not found: ID does not exist" containerID="ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.813466 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f"} err="failed to get container status \"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f\": rpc error: code = NotFound desc = could not find container \"ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f\": container with ID starting with ca7ae215bfcbeeda4ac47ba950ee9e030fea2318a904d6fc1124b92ec040741f not found: ID does not exist" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.813499 4672 scope.go:117] "RemoveContainer" containerID="2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9" Oct 07 15:06:47 crc kubenswrapper[4672]: E1007 15:06:47.814153 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9\": container with ID starting with 2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9 not found: ID does not exist" containerID="2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.814195 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9"} err="failed to get container status \"2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9\": rpc error: code = NotFound desc = could not find container \"2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9\": container with ID starting with 2d3efbd399dc8066c0b177172da99497cddcfbfb24874dacecc46501d940e9d9 not found: ID does not exist" Oct 07 15:06:47 crc kubenswrapper[4672]: I1007 15:06:47.905338 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" path="/var/lib/kubelet/pods/99b7e18e-95f8-452c-a4cb-e59b802b309e/volumes" Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.346110 4672 generic.go:334] "Generic (PLEG): container finished" podID="73afc1bf-c6d8-4d61-b050-db75d0cd219f" containerID="57cf9a3c2477deb5fae9cb38ba9d0bc52e04160429ee491aef41c6b60d7d09df" exitCode=0 Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.346499 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5p5sl" event={"ID":"73afc1bf-c6d8-4d61-b050-db75d0cd219f","Type":"ContainerDied","Data":"57cf9a3c2477deb5fae9cb38ba9d0bc52e04160429ee491aef41c6b60d7d09df"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.352927 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3168983-5af6-4e27-a5f0-23c80d627c0a","Type":"ContainerStarted","Data":"6d8861f5b871484c5e25be3ef29e98332b653d302df459dd821e73fe766976c2"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.355408 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"6a4f76b5-fd91-462b-9e85-9c66d83ab353","Type":"ContainerStarted","Data":"eb82a7b2828b581f9134a0583b5a7d59d4078eec6f9b7532f26a1304e2335f42"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.357553 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6331c4be-e7ca-4786-9ac8-3aac826906e0","Type":"ContainerStarted","Data":"cd762170c1bfdde17ccc8d92c93baf32f9f68c39de06045a7a4d7b8c06c4d85e"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.359821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7f89db03-61a3-4391-aa60-fc306eb36c31","Type":"ContainerStarted","Data":"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.359949 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.361625 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl" event={"ID":"0e4d1227-361f-4c04-b0ce-12295f021364","Type":"ContainerStarted","Data":"5fab3826156f6db4528950cbee5d53a488d6a5790bee058016d476847e99b778"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.361749 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-tzsfl" Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.363430 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"16ba9419-35b8-45df-bf64-86d144d67284","Type":"ContainerStarted","Data":"407921d204d593071a21b1abb9e8a060b19cac25f3fd402f306ea4c2e86a3489"} Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.384739 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.522065265 podStartE2EDuration="17.384718232s" podCreationTimestamp="2025-10-07 15:06:31 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.902008126 +0000 UTC m=+1076.877186707" lastFinishedPulling="2025-10-07 15:06:47.764661083 +0000 UTC m=+1084.739839674" observedRunningTime="2025-10-07 15:06:48.383398814 +0000 UTC m=+1085.358577395" watchObservedRunningTime="2025-10-07 15:06:48.384718232 +0000 UTC m=+1085.359896813" Oct 07 15:06:48 crc kubenswrapper[4672]: I1007 15:06:48.405568 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-tzsfl" podStartSLOduration=6.538986841 podStartE2EDuration="13.40555372s" podCreationTimestamp="2025-10-07 15:06:35 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.881579949 +0000 UTC m=+1076.856758530" lastFinishedPulling="2025-10-07 15:06:46.748146828 +0000 UTC m=+1083.723325409" observedRunningTime="2025-10-07 15:06:48.403985525 +0000 UTC m=+1085.379164106" watchObservedRunningTime="2025-10-07 15:06:48.40555372 +0000 UTC m=+1085.380732301" Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.377005 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5p5sl" event={"ID":"73afc1bf-c6d8-4d61-b050-db75d0cd219f","Type":"ContainerStarted","Data":"1cba9d2f810ac441da9d79049eb9aa548172526a6c9c2e0f177fd666029c2e5a"} Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.377402 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5p5sl" 
event={"ID":"73afc1bf-c6d8-4d61-b050-db75d0cd219f","Type":"ContainerStarted","Data":"c4a912de7541d93f8007ac35060432b62f06f6f06c2c2601fa0cafa8bf893821"} Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.377603 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.379204 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerStarted","Data":"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"} Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.381695 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerStarted","Data":"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9"} Oct 07 15:06:49 crc kubenswrapper[4672]: I1007 15:06:49.398581 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5p5sl" podStartSLOduration=7.759050483 podStartE2EDuration="14.398561601s" podCreationTimestamp="2025-10-07 15:06:35 +0000 UTC" firstStartedPulling="2025-10-07 15:06:40.005090736 +0000 UTC m=+1076.980269317" lastFinishedPulling="2025-10-07 15:06:46.644601864 +0000 UTC m=+1083.619780435" observedRunningTime="2025-10-07 15:06:49.398347315 +0000 UTC m=+1086.373525906" watchObservedRunningTime="2025-10-07 15:06:49.398561601 +0000 UTC m=+1086.373740172" Oct 07 15:06:50 crc kubenswrapper[4672]: I1007 15:06:50.388090 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.395998 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6a4f76b5-fd91-462b-9e85-9c66d83ab353","Type":"ContainerStarted","Data":"ecb3b80237e343245bbc85fb258b65c18f2261ba7d4df434ea2d63ecc2cc1c50"} Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.397488 4672 generic.go:334] "Generic (PLEG): container finished" podID="6331c4be-e7ca-4786-9ac8-3aac826906e0" containerID="cd762170c1bfdde17ccc8d92c93baf32f9f68c39de06045a7a4d7b8c06c4d85e" exitCode=0 Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.397537 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6331c4be-e7ca-4786-9ac8-3aac826906e0","Type":"ContainerDied","Data":"cd762170c1bfdde17ccc8d92c93baf32f9f68c39de06045a7a4d7b8c06c4d85e"} Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.399602 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"16ba9419-35b8-45df-bf64-86d144d67284","Type":"ContainerStarted","Data":"4bd66bb77514bc709277255df55f28376819215b85b85fea5e358c5c6161aeb5"} Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.401988 4672 generic.go:334] "Generic (PLEG): container finished" podID="c3168983-5af6-4e27-a5f0-23c80d627c0a" containerID="6d8861f5b871484c5e25be3ef29e98332b653d302df459dd821e73fe766976c2" exitCode=0 Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.402099 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3168983-5af6-4e27-a5f0-23c80d627c0a","Type":"ContainerDied","Data":"6d8861f5b871484c5e25be3ef29e98332b653d302df459dd821e73fe766976c2"} Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.423843 4672 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=5.222134331 podStartE2EDuration="16.42382549s" podCreationTimestamp="2025-10-07 15:06:35 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.759252776 +0000 UTC m=+1076.734431357" lastFinishedPulling="2025-10-07 15:06:50.960943935 +0000 UTC m=+1087.936122516" observedRunningTime="2025-10-07 15:06:51.420959727 +0000 UTC m=+1088.396138308" watchObservedRunningTime="2025-10-07 15:06:51.42382549 +0000 UTC m=+1088.399004071" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.449427 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=2.77852415 podStartE2EDuration="13.449403224s" podCreationTimestamp="2025-10-07 15:06:38 +0000 UTC" firstStartedPulling="2025-10-07 15:06:40.276183182 +0000 UTC m=+1077.251361763" lastFinishedPulling="2025-10-07 15:06:50.947062256 +0000 UTC m=+1087.922240837" observedRunningTime="2025-10-07 15:06:51.440378235 +0000 UTC m=+1088.415556816" watchObservedRunningTime="2025-10-07 15:06:51.449403224 +0000 UTC m=+1088.424581825" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.704498 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.740395 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.944935 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.945300 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:51 crc kubenswrapper[4672]: I1007 15:06:51.980899 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.410088 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3168983-5af6-4e27-a5f0-23c80d627c0a","Type":"ContainerStarted","Data":"0c5eb586a41ffc04929c4d8f8045f60ee1527b4d654e4a9acae1758f7877ea19"} Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.413166 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6331c4be-e7ca-4786-9ac8-3aac826906e0","Type":"ContainerStarted","Data":"5c96cdef3b883d888ecd12b42acda5cdc398a91dffd2a86b71d159afa32f13e1"} Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.413681 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.430918 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.14840535 podStartE2EDuration="24.430901535s" podCreationTimestamp="2025-10-07 15:06:28 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.373711162 +0000 UTC m=+1076.348889743" lastFinishedPulling="2025-10-07 15:06:46.656207347 +0000 UTC m=+1083.631385928" observedRunningTime="2025-10-07 15:06:52.428725932 +0000 UTC m=+1089.403904523" watchObservedRunningTime="2025-10-07 15:06:52.430901535 +0000 UTC m=+1089.406080116" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.451868 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovsdbserver-nb-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.452305 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.454333 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=17.207109056 podStartE2EDuration="24.454317897s" podCreationTimestamp="2025-10-07 15:06:28 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.661671303 +0000 UTC m=+1076.636849874" lastFinishedPulling="2025-10-07 15:06:46.908880124 +0000 UTC m=+1083.884058715" observedRunningTime="2025-10-07 15:06:52.447314796 +0000 UTC m=+1089.422493377" watchObservedRunningTime="2025-10-07 15:06:52.454317897 +0000 UTC m=+1089.429496478" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.744262 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7468f5454f-c25px"] Oct 07 15:06:52 crc kubenswrapper[4672]: E1007 15:06:52.747762 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="dnsmasq-dns" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.747809 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="dnsmasq-dns" Oct 07 15:06:52 crc kubenswrapper[4672]: E1007 15:06:52.747846 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="init" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.747859 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="init" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.748601 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="99b7e18e-95f8-452c-a4cb-e59b802b309e" containerName="dnsmasq-dns" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.766359 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.771788 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.793357 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7468f5454f-c25px"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.838895 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-djrzm"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.840603 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.844474 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.862727 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-djrzm"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.869438 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.869789 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.869802 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.870925 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.871080 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5s8k\" (UniqueName: \"kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.874070 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.876576 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.876757 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.876909 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.877372 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-z9h72" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.895103 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.914958 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7468f5454f-c25px"] Oct 07 15:06:52 crc kubenswrapper[4672]: E1007 15:06:52.920919 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-c5s8k ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7468f5454f-c25px" podUID="088608f6-02f1-4e18-b5c6-a42b67f24809" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.943517 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.945840 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.948216 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.958603 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972210 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovn-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972263 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-combined-ca-bundle\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972292 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c480b3-ae62-4b55-b055-d0c1c0ff5777-config\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972317 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972340 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovs-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972477 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972572 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972613 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwcsb\" (UniqueName: \"kubernetes.io/projected/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-kube-api-access-xwcsb\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972662 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5s8k\" (UniqueName: \"kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972683 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972725 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-scripts\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972750 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972783 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972850 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szbnz\" (UniqueName: \"kubernetes.io/projected/76c480b3-ae62-4b55-b055-d0c1c0ff5777-kube-api-access-szbnz\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972872 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-config\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.972906 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.973757 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.973809 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.974366 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:52 crc kubenswrapper[4672]: I1007 15:06:52.991968 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5s8k\" (UniqueName: \"kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k\") pod \"dnsmasq-dns-7468f5454f-c25px\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.074365 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szbnz\" (UniqueName: \"kubernetes.io/projected/76c480b3-ae62-4b55-b055-d0c1c0ff5777-kube-api-access-szbnz\") pod \"ovn-controller-metrics-djrzm\" (UID: 
\"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.074694 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-config\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.074821 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.074944 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovn-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.075084 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-combined-ca-bundle\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.075252 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c480b3-ae62-4b55-b055-d0c1c0ff5777-config\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.075423 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.076402 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovs-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.076534 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.076709 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc 
kubenswrapper[4672]: I1007 15:06:53.076842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.076952 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwcsb\" (UniqueName: \"kubernetes.io/projected/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-kube-api-access-xwcsb\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.075664 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-config\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077303 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-scripts\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077364 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovs-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.075352 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/76c480b3-ae62-4b55-b055-d0c1c0ff5777-ovn-rundir\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077407 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077469 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcsbf\" (UniqueName: \"kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077502 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.076302 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c480b3-ae62-4b55-b055-d0c1c0ff5777-config\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077565 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.077918 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.080313 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.080552 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-scripts\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.081051 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.087974 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.090394 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c480b3-ae62-4b55-b055-d0c1c0ff5777-combined-ca-bundle\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.094178 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szbnz\" (UniqueName: \"kubernetes.io/projected/76c480b3-ae62-4b55-b055-d0c1c0ff5777-kube-api-access-szbnz\") pod \"ovn-controller-metrics-djrzm\" (UID: \"76c480b3-ae62-4b55-b055-d0c1c0ff5777\") " pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 
15:06:53.095492 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwcsb\" (UniqueName: \"kubernetes.io/projected/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-kube-api-access-xwcsb\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.095989 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2cc7fd-952a-4115-a4e4-408bfc75a54d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b2cc7fd-952a-4115-a4e4-408bfc75a54d\") " pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.168555 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-djrzm" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.178502 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.179036 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.179188 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcsbf\" (UniqueName: \"kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.179286 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.179427 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.179557 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.180411 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc 
kubenswrapper[4672]: I1007 15:06:53.180616 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.181166 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.201981 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcsbf\" (UniqueName: \"kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf\") pod \"dnsmasq-dns-88449cf85-2qbk6\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.206741 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.274504 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.422664 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.434576 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.483470 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5s8k\" (UniqueName: \"kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k\") pod \"088608f6-02f1-4e18-b5c6-a42b67f24809\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.484416 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc\") pod \"088608f6-02f1-4e18-b5c6-a42b67f24809\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.484546 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config\") pod \"088608f6-02f1-4e18-b5c6-a42b67f24809\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.484573 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb\") pod \"088608f6-02f1-4e18-b5c6-a42b67f24809\" (UID: \"088608f6-02f1-4e18-b5c6-a42b67f24809\") " Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.484987 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "088608f6-02f1-4e18-b5c6-a42b67f24809" (UID: 
"088608f6-02f1-4e18-b5c6-a42b67f24809"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.485328 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config" (OuterVolumeSpecName: "config") pod "088608f6-02f1-4e18-b5c6-a42b67f24809" (UID: "088608f6-02f1-4e18-b5c6-a42b67f24809"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.485763 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "088608f6-02f1-4e18-b5c6-a42b67f24809" (UID: "088608f6-02f1-4e18-b5c6-a42b67f24809"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.486316 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.486334 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.486346 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/088608f6-02f1-4e18-b5c6-a42b67f24809-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.488266 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k" (OuterVolumeSpecName: "kube-api-access-c5s8k") pod "088608f6-02f1-4e18-b5c6-a42b67f24809" (UID: "088608f6-02f1-4e18-b5c6-a42b67f24809"). InnerVolumeSpecName "kube-api-access-c5s8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.587833 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5s8k\" (UniqueName: \"kubernetes.io/projected/088608f6-02f1-4e18-b5c6-a42b67f24809-kube-api-access-c5s8k\") on node \"crc\" DevicePath \"\"" Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.646142 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-djrzm"] Oct 07 15:06:53 crc kubenswrapper[4672]: W1007 15:06:53.650260 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c480b3_ae62_4b55_b055_d0c1c0ff5777.slice/crio-b73fc9aab1824970287a24a7dc7609cd914947ccdb1c6442fd2ffebd81248103 WatchSource:0}: Error finding container b73fc9aab1824970287a24a7dc7609cd914947ccdb1c6442fd2ffebd81248103: Status 404 returned error can't find the container with id b73fc9aab1824970287a24a7dc7609cd914947ccdb1c6442fd2ffebd81248103 Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.746075 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 07 15:06:53 crc kubenswrapper[4672]: W1007 15:06:53.757063 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b2cc7fd_952a_4115_a4e4_408bfc75a54d.slice/crio-7c91539a9e095b29c5102bde6bdfe9809c511a1cf8928fab58cf23a1639bb1d6 WatchSource:0}: Error finding container 7c91539a9e095b29c5102bde6bdfe9809c511a1cf8928fab58cf23a1639bb1d6: Status 404 returned error can't find the container with id 7c91539a9e095b29c5102bde6bdfe9809c511a1cf8928fab58cf23a1639bb1d6 Oct 07 15:06:53 crc kubenswrapper[4672]: I1007 15:06:53.823971 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:06:53 crc kubenswrapper[4672]: W1007 15:06:53.826814 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe482f39_ac91_4461_bd30_b733a8c78137.slice/crio-864e8b1b714d65923e9642180be17f495878a37d31c4d9da379a1d95849646e2 WatchSource:0}: Error finding container 864e8b1b714d65923e9642180be17f495878a37d31c4d9da379a1d95849646e2: Status 404 returned error can't find the container with id 864e8b1b714d65923e9642180be17f495878a37d31c4d9da379a1d95849646e2 Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.431228 4672 generic.go:334] "Generic (PLEG): container finished" podID="fe482f39-ac91-4461-bd30-b733a8c78137" containerID="806df387ebbaf7bfd44463cc6bd753ea191b44336566d126d6d679488aba61a7" exitCode=0 Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.431360 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" event={"ID":"fe482f39-ac91-4461-bd30-b733a8c78137","Type":"ContainerDied","Data":"806df387ebbaf7bfd44463cc6bd753ea191b44336566d126d6d679488aba61a7"} Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.431565 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" event={"ID":"fe482f39-ac91-4461-bd30-b733a8c78137","Type":"ContainerStarted","Data":"864e8b1b714d65923e9642180be17f495878a37d31c4d9da379a1d95849646e2"} Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.433449 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-djrzm" 
event={"ID":"76c480b3-ae62-4b55-b055-d0c1c0ff5777","Type":"ContainerStarted","Data":"9b0fa8a4f29e71314c1293b2e7e2e97b86f78a77f03b528833d6dfabaa9fd021"} Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.433477 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-djrzm" event={"ID":"76c480b3-ae62-4b55-b055-d0c1c0ff5777","Type":"ContainerStarted","Data":"b73fc9aab1824970287a24a7dc7609cd914947ccdb1c6442fd2ffebd81248103"} Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.435146 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b2cc7fd-952a-4115-a4e4-408bfc75a54d","Type":"ContainerStarted","Data":"7c91539a9e095b29c5102bde6bdfe9809c511a1cf8928fab58cf23a1639bb1d6"} Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.435363 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7468f5454f-c25px" Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.539203 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7468f5454f-c25px"] Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.557872 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7468f5454f-c25px"] Oct 07 15:06:54 crc kubenswrapper[4672]: I1007 15:06:54.560427 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-djrzm" podStartSLOduration=2.560404777 podStartE2EDuration="2.560404777s" podCreationTimestamp="2025-10-07 15:06:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:06:54.500162407 +0000 UTC m=+1091.475341008" watchObservedRunningTime="2025-10-07 15:06:54.560404777 +0000 UTC m=+1091.535583358" Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.150546 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.444516 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b2cc7fd-952a-4115-a4e4-408bfc75a54d","Type":"ContainerStarted","Data":"77d526855b0f42dcdb80a4297caf7d5a6da24136e43df8dc1c069cde03914b2f"} Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.444557 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b2cc7fd-952a-4115-a4e4-408bfc75a54d","Type":"ContainerStarted","Data":"40155e4a673f9f0a5ffabce26156a580a83b9c36c6aaf418e251c7d30c36493e"} Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.444665 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.447509 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" event={"ID":"fe482f39-ac91-4461-bd30-b733a8c78137","Type":"ContainerStarted","Data":"cdccfb816da1be42ba88480b22adc20a6017afc73c2a6435758089a3c8513c3e"} Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.464058 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.690635688 podStartE2EDuration="3.46400037s" podCreationTimestamp="2025-10-07 15:06:52 +0000 UTC" firstStartedPulling="2025-10-07 15:06:53.760505133 +0000 UTC m=+1090.735683714" lastFinishedPulling="2025-10-07 15:06:54.533869815 +0000 UTC m=+1091.509048396" 
observedRunningTime="2025-10-07 15:06:55.462028223 +0000 UTC m=+1092.437206804" watchObservedRunningTime="2025-10-07 15:06:55.46400037 +0000 UTC m=+1092.439178951" Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.483759 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" podStartSLOduration=3.483743767 podStartE2EDuration="3.483743767s" podCreationTimestamp="2025-10-07 15:06:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:06:55.48108086 +0000 UTC m=+1092.456259441" watchObservedRunningTime="2025-10-07 15:06:55.483743767 +0000 UTC m=+1092.458922348" Oct 07 15:06:55 crc kubenswrapper[4672]: I1007 15:06:55.900138 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="088608f6-02f1-4e18-b5c6-a42b67f24809" path="/var/lib/kubelet/pods/088608f6-02f1-4e18-b5c6-a42b67f24809/volumes" Oct 07 15:06:56 crc kubenswrapper[4672]: I1007 15:06:56.453937 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:06:56 crc kubenswrapper[4672]: I1007 15:06:56.650136 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:06:56 crc kubenswrapper[4672]: I1007 15:06:56.650402 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.770064 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.770504 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.812522 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.927484 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.927868 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 07 15:06:59 crc kubenswrapper[4672]: I1007 15:06:59.967546 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 07 15:07:00 crc kubenswrapper[4672]: I1007 15:07:00.531046 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 07 15:07:00 crc kubenswrapper[4672]: I1007 15:07:00.541382 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.839107 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.839347 4672 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="dnsmasq-dns" containerID="cri-o://cdccfb816da1be42ba88480b22adc20a6017afc73c2a6435758089a3c8513c3e" gracePeriod=10 Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.841866 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.873400 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"] Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.875186 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.907592 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"] Oct 07 15:07:01 crc kubenswrapper[4672]: I1007 15:07:01.919560 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.031493 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.031541 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.031561 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.031832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xccs9\" (UniqueName: \"kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.032050 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.134090 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" 
Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.134231 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.134263 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.134290 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.134333 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xccs9\" (UniqueName: \"kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.135035 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.135130 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.135196 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.135214 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.155141 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xccs9\" (UniqueName: \"kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9\") pod \"dnsmasq-dns-64d796cf9-nxb9q\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.220363 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.505352 4672 generic.go:334] "Generic (PLEG): container finished" podID="fe482f39-ac91-4461-bd30-b733a8c78137" containerID="cdccfb816da1be42ba88480b22adc20a6017afc73c2a6435758089a3c8513c3e" exitCode=0 Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.505429 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" event={"ID":"fe482f39-ac91-4461-bd30-b733a8c78137","Type":"ContainerDied","Data":"cdccfb816da1be42ba88480b22adc20a6017afc73c2a6435758089a3c8513c3e"} Oct 07 15:07:02 crc kubenswrapper[4672]: W1007 15:07:02.643500 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58bd9bee_ec13_4d46_9a0a_2e94629ac059.slice/crio-b61e384e325c149aef43f05673bc58b42d11ae2b53aa4038a9b85bad171f7fe8 WatchSource:0}: Error finding container b61e384e325c149aef43f05673bc58b42d11ae2b53aa4038a9b85bad171f7fe8: Status 404 returned error can't find the container with id b61e384e325c149aef43f05673bc58b42d11ae2b53aa4038a9b85bad171f7fe8 Oct 07 15:07:02 crc kubenswrapper[4672]: I1007 15:07:02.647632 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"] Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.047514 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.052878 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.054755 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.054947 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.055575 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.055882 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-hw82n" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.070533 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.149914 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-lock\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.149980 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.150142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-cache\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " 
pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.150199 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.150339 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gnvd\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-kube-api-access-4gnvd\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.251780 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.251850 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-cache\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.251880 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.251922 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gnvd\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-kube-api-access-4gnvd\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.251961 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-lock\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.252107 4672 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.252143 4672 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.252208 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift podName:99812f34-1d2c-4f0f-bf07-9569fde6d437 nodeName:}" failed. No retries permitted until 2025-10-07 15:07:03.752187139 +0000 UTC m=+1100.727365780 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift") pod "swift-storage-0" (UID: "99812f34-1d2c-4f0f-bf07-9569fde6d437") : configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.252283 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-cache\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.252326 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.252337 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/99812f34-1d2c-4f0f-bf07-9569fde6d437-lock\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.272981 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.275867 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.278267 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gnvd\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-kube-api-access-4gnvd\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.514549 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" event={"ID":"58bd9bee-ec13-4d46-9a0a-2e94629ac059","Type":"ContainerStarted","Data":"b61e384e325c149aef43f05673bc58b42d11ae2b53aa4038a9b85bad171f7fe8"} Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.535628 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-r4b6l"] Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.536938 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.542405 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.542908 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.548823 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.556645 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-r4b6l"] Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.659793 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.659955 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.660102 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.660153 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.660192 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.660242 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.660469 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjlhd\" (UniqueName: \"kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 
15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762563 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762617 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762646 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762677 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762729 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjlhd\" (UniqueName: \"kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762761 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762841 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.762893 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.763366 4672 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.763409 4672 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: E1007 15:07:03.763465 4672 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift podName:99812f34-1d2c-4f0f-bf07-9569fde6d437 nodeName:}" failed. No retries permitted until 2025-10-07 15:07:04.763446933 +0000 UTC m=+1101.738625514 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift") pod "swift-storage-0" (UID: "99812f34-1d2c-4f0f-bf07-9569fde6d437") : configmap "swift-ring-files" not found Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.764291 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.764666 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.764679 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.765736 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.768503 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.783561 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.786998 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjlhd\" (UniqueName: \"kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd\") pod \"swift-ring-rebalance-r4b6l\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:03 crc kubenswrapper[4672]: I1007 15:07:03.858419 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:04 crc kubenswrapper[4672]: I1007 15:07:04.397953 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-r4b6l"] Oct 07 15:07:04 crc kubenswrapper[4672]: W1007 15:07:04.402351 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda57e275b_ff63_4284_aae2_7fbc858c0128.slice/crio-f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17 WatchSource:0}: Error finding container f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17: Status 404 returned error can't find the container with id f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17 Oct 07 15:07:04 crc kubenswrapper[4672]: I1007 15:07:04.524978 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r4b6l" event={"ID":"a57e275b-ff63-4284-aae2-7fbc858c0128","Type":"ContainerStarted","Data":"f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17"} Oct 07 15:07:04 crc kubenswrapper[4672]: I1007 15:07:04.789473 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:04 crc kubenswrapper[4672]: E1007 15:07:04.789704 4672 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 15:07:04 crc kubenswrapper[4672]: E1007 15:07:04.789853 4672 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 15:07:04 crc kubenswrapper[4672]: E1007 15:07:04.789910 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift podName:99812f34-1d2c-4f0f-bf07-9569fde6d437 nodeName:}" failed. No retries permitted until 2025-10-07 15:07:06.789890095 +0000 UTC m=+1103.765068676 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift") pod "swift-storage-0" (UID: "99812f34-1d2c-4f0f-bf07-9569fde6d437") : configmap "swift-ring-files" not found Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.423752 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nddgp"] Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.425515 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nddgp" Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.434807 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nddgp"] Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.502542 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68zqh\" (UniqueName: \"kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh\") pod \"glance-db-create-nddgp\" (UID: \"47f8f609-ee6c-49c6-a617-9f258638926b\") " pod="openstack/glance-db-create-nddgp" Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.603933 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68zqh\" (UniqueName: \"kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh\") pod \"glance-db-create-nddgp\" (UID: \"47f8f609-ee6c-49c6-a617-9f258638926b\") " pod="openstack/glance-db-create-nddgp" Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.626914 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68zqh\" (UniqueName: \"kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh\") pod \"glance-db-create-nddgp\" (UID: \"47f8f609-ee6c-49c6-a617-9f258638926b\") " pod="openstack/glance-db-create-nddgp" Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.747208 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nddgp" Oct 07 15:07:05 crc kubenswrapper[4672]: I1007 15:07:05.973832 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nddgp"] Oct 07 15:07:06 crc kubenswrapper[4672]: W1007 15:07:06.002070 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f8f609_ee6c_49c6_a617_9f258638926b.slice/crio-272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4 WatchSource:0}: Error finding container 272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4: Status 404 returned error can't find the container with id 272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4 Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.152349 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.315829 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcsbf\" (UniqueName: \"kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf\") pod \"fe482f39-ac91-4461-bd30-b733a8c78137\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.315961 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb\") pod \"fe482f39-ac91-4461-bd30-b733a8c78137\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.316100 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc\") pod \"fe482f39-ac91-4461-bd30-b733a8c78137\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.316176 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config\") pod \"fe482f39-ac91-4461-bd30-b733a8c78137\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.316251 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb\") pod \"fe482f39-ac91-4461-bd30-b733a8c78137\" (UID: \"fe482f39-ac91-4461-bd30-b733a8c78137\") " Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.320933 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf" (OuterVolumeSpecName: "kube-api-access-mcsbf") pod "fe482f39-ac91-4461-bd30-b733a8c78137" (UID: "fe482f39-ac91-4461-bd30-b733a8c78137"). InnerVolumeSpecName "kube-api-access-mcsbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.350689 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config" (OuterVolumeSpecName: "config") pod "fe482f39-ac91-4461-bd30-b733a8c78137" (UID: "fe482f39-ac91-4461-bd30-b733a8c78137"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.350693 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fe482f39-ac91-4461-bd30-b733a8c78137" (UID: "fe482f39-ac91-4461-bd30-b733a8c78137"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.351341 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fe482f39-ac91-4461-bd30-b733a8c78137" (UID: "fe482f39-ac91-4461-bd30-b733a8c78137"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.351700 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fe482f39-ac91-4461-bd30-b733a8c78137" (UID: "fe482f39-ac91-4461-bd30-b733a8c78137"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.418426 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.418748 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.418761 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.418770 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe482f39-ac91-4461-bd30-b733a8c78137-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.418781 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcsbf\" (UniqueName: \"kubernetes.io/projected/fe482f39-ac91-4461-bd30-b733a8c78137-kube-api-access-mcsbf\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.546467 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" event={"ID":"fe482f39-ac91-4461-bd30-b733a8c78137","Type":"ContainerDied","Data":"864e8b1b714d65923e9642180be17f495878a37d31c4d9da379a1d95849646e2"} Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.546511 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-88449cf85-2qbk6" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.546535 4672 scope.go:117] "RemoveContainer" containerID="cdccfb816da1be42ba88480b22adc20a6017afc73c2a6435758089a3c8513c3e" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.550460 4672 generic.go:334] "Generic (PLEG): container finished" podID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerID="d6296fcf74afdd02c9a675ee65509d63d56b408346a407011825344d9f879d12" exitCode=0 Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.550534 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" event={"ID":"58bd9bee-ec13-4d46-9a0a-2e94629ac059","Type":"ContainerDied","Data":"d6296fcf74afdd02c9a675ee65509d63d56b408346a407011825344d9f879d12"} Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.552796 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nddgp" event={"ID":"47f8f609-ee6c-49c6-a617-9f258638926b","Type":"ContainerStarted","Data":"497b4799b70c212352a3a49ddafe22596bd73fcf2c6bb07fac6e9dc577dcdfda"} Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.552832 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nddgp" event={"ID":"47f8f609-ee6c-49c6-a617-9f258638926b","Type":"ContainerStarted","Data":"272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4"} Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.577231 4672 scope.go:117] "RemoveContainer" containerID="806df387ebbaf7bfd44463cc6bd753ea191b44336566d126d6d679488aba61a7" Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.594368 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.599673 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-88449cf85-2qbk6"] Oct 07 15:07:06 crc kubenswrapper[4672]: I1007 15:07:06.833757 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:06 crc kubenswrapper[4672]: E1007 15:07:06.833963 4672 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 15:07:06 crc kubenswrapper[4672]: E1007 15:07:06.833987 4672 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 15:07:06 crc kubenswrapper[4672]: E1007 15:07:06.834054 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift podName:99812f34-1d2c-4f0f-bf07-9569fde6d437 nodeName:}" failed. No retries permitted until 2025-10-07 15:07:10.834039525 +0000 UTC m=+1107.809218106 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift") pod "swift-storage-0" (UID: "99812f34-1d2c-4f0f-bf07-9569fde6d437") : configmap "swift-ring-files" not found Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.562939 4672 generic.go:334] "Generic (PLEG): container finished" podID="47f8f609-ee6c-49c6-a617-9f258638926b" containerID="497b4799b70c212352a3a49ddafe22596bd73fcf2c6bb07fac6e9dc577dcdfda" exitCode=0 Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.563003 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nddgp" event={"ID":"47f8f609-ee6c-49c6-a617-9f258638926b","Type":"ContainerDied","Data":"497b4799b70c212352a3a49ddafe22596bd73fcf2c6bb07fac6e9dc577dcdfda"} Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.566486 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" event={"ID":"58bd9bee-ec13-4d46-9a0a-2e94629ac059","Type":"ContainerStarted","Data":"f34669468ae74494b5600e4486e7a3db912679745d16d58e1dca44aaf9ca8ed8"} Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.566632 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.596308 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" podStartSLOduration=6.596291528 podStartE2EDuration="6.596291528s" podCreationTimestamp="2025-10-07 15:07:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:07.591290715 +0000 UTC m=+1104.566469296" watchObservedRunningTime="2025-10-07 15:07:07.596291528 +0000 UTC m=+1104.571470109" Oct 07 15:07:07 crc kubenswrapper[4672]: I1007 15:07:07.905629 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" path="/var/lib/kubelet/pods/fe482f39-ac91-4461-bd30-b733a8c78137/volumes" Oct 07 15:07:08 crc kubenswrapper[4672]: I1007 15:07:08.278837 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.223593 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nddgp" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.381111 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68zqh\" (UniqueName: \"kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh\") pod \"47f8f609-ee6c-49c6-a617-9f258638926b\" (UID: \"47f8f609-ee6c-49c6-a617-9f258638926b\") " Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.385484 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh" (OuterVolumeSpecName: "kube-api-access-68zqh") pod "47f8f609-ee6c-49c6-a617-9f258638926b" (UID: "47f8f609-ee6c-49c6-a617-9f258638926b"). InnerVolumeSpecName "kube-api-access-68zqh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.482433 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68zqh\" (UniqueName: \"kubernetes.io/projected/47f8f609-ee6c-49c6-a617-9f258638926b-kube-api-access-68zqh\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.588750 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nddgp" event={"ID":"47f8f609-ee6c-49c6-a617-9f258638926b","Type":"ContainerDied","Data":"272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4"} Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.588818 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nddgp" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.588792 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="272ae235eebad86d3179b236c6dfcbdee40389e9c2a506916c1ce68ebea2b0b4" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.590772 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r4b6l" event={"ID":"a57e275b-ff63-4284-aae2-7fbc858c0128","Type":"ContainerStarted","Data":"a3b4a4a3662ce3e7bd011cc0d04bc91aa4c63a914727fa606d835ea3f8fff68d"} Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.635801 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-r4b6l" podStartSLOduration=1.816812887 podStartE2EDuration="6.635779595s" podCreationTimestamp="2025-10-07 15:07:03 +0000 UTC" firstStartedPulling="2025-10-07 15:07:04.409722395 +0000 UTC m=+1101.384900976" lastFinishedPulling="2025-10-07 15:07:09.228689103 +0000 UTC m=+1106.203867684" observedRunningTime="2025-10-07 15:07:09.626149779 +0000 UTC m=+1106.601328370" watchObservedRunningTime="2025-10-07 15:07:09.635779595 +0000 UTC m=+1106.610958166" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.751215 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-h82gn"] Oct 07 15:07:09 crc kubenswrapper[4672]: E1007 15:07:09.751600 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="dnsmasq-dns" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.751618 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="dnsmasq-dns" Oct 07 15:07:09 crc kubenswrapper[4672]: E1007 15:07:09.751640 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="init" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.751647 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="init" Oct 07 15:07:09 crc kubenswrapper[4672]: E1007 15:07:09.751662 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f8f609-ee6c-49c6-a617-9f258638926b" containerName="mariadb-database-create" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.751669 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f8f609-ee6c-49c6-a617-9f258638926b" containerName="mariadb-database-create" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.751819 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe482f39-ac91-4461-bd30-b733a8c78137" containerName="dnsmasq-dns" Oct 07 15:07:09 crc 
kubenswrapper[4672]: I1007 15:07:09.751835 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="47f8f609-ee6c-49c6-a617-9f258638926b" containerName="mariadb-database-create" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.752421 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.756805 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-h82gn"] Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.890647 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwqqj\" (UniqueName: \"kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj\") pod \"keystone-db-create-h82gn\" (UID: \"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9\") " pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:09 crc kubenswrapper[4672]: I1007 15:07:09.992540 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwqqj\" (UniqueName: \"kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj\") pod \"keystone-db-create-h82gn\" (UID: \"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9\") " pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.012198 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwqqj\" (UniqueName: \"kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj\") pod \"keystone-db-create-h82gn\" (UID: \"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9\") " pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.037250 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-8sz2j"] Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.038492 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.045354 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-8sz2j"] Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.067601 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.194952 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjrsp\" (UniqueName: \"kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp\") pod \"placement-db-create-8sz2j\" (UID: \"f5c70fe0-f6a3-4d99-b481-e252f5f4900f\") " pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.297123 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjrsp\" (UniqueName: \"kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp\") pod \"placement-db-create-8sz2j\" (UID: \"f5c70fe0-f6a3-4d99-b481-e252f5f4900f\") " pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.313187 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjrsp\" (UniqueName: \"kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp\") pod \"placement-db-create-8sz2j\" (UID: \"f5c70fe0-f6a3-4d99-b481-e252f5f4900f\") " pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.352506 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.484502 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-h82gn"] Oct 07 15:07:10 crc kubenswrapper[4672]: W1007 15:07:10.516646 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ac1a472_eb46_4de1_9833_3acb1d5ca8b9.slice/crio-7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6 WatchSource:0}: Error finding container 7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6: Status 404 returned error can't find the container with id 7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6 Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.607682 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-h82gn" event={"ID":"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9","Type":"ContainerStarted","Data":"7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6"} Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.783097 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-8sz2j"] Oct 07 15:07:10 crc kubenswrapper[4672]: W1007 15:07:10.793768 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5c70fe0_f6a3_4d99_b481_e252f5f4900f.slice/crio-24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275 WatchSource:0}: Error finding container 24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275: Status 404 returned error can't find the container with id 24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275 Oct 07 15:07:10 crc kubenswrapper[4672]: I1007 15:07:10.907913 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:10 crc 
kubenswrapper[4672]: E1007 15:07:10.908182 4672 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 15:07:10 crc kubenswrapper[4672]: E1007 15:07:10.908201 4672 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 15:07:10 crc kubenswrapper[4672]: E1007 15:07:10.908246 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift podName:99812f34-1d2c-4f0f-bf07-9569fde6d437 nodeName:}" failed. No retries permitted until 2025-10-07 15:07:18.908233932 +0000 UTC m=+1115.883412513 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift") pod "swift-storage-0" (UID: "99812f34-1d2c-4f0f-bf07-9569fde6d437") : configmap "swift-ring-files" not found Oct 07 15:07:11 crc kubenswrapper[4672]: I1007 15:07:11.618946 4672 generic.go:334] "Generic (PLEG): container finished" podID="f5c70fe0-f6a3-4d99-b481-e252f5f4900f" containerID="62ffe44deb835c7dfd59342fc879925d7f6d54d26c2f2e755fb22a37d54bf09f" exitCode=0 Oct 07 15:07:11 crc kubenswrapper[4672]: I1007 15:07:11.619066 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8sz2j" event={"ID":"f5c70fe0-f6a3-4d99-b481-e252f5f4900f","Type":"ContainerDied","Data":"62ffe44deb835c7dfd59342fc879925d7f6d54d26c2f2e755fb22a37d54bf09f"} Oct 07 15:07:11 crc kubenswrapper[4672]: I1007 15:07:11.619323 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8sz2j" event={"ID":"f5c70fe0-f6a3-4d99-b481-e252f5f4900f","Type":"ContainerStarted","Data":"24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275"} Oct 07 15:07:11 crc kubenswrapper[4672]: I1007 15:07:11.622349 4672 generic.go:334] "Generic (PLEG): container finished" podID="5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" containerID="55a3b5e6469791eec39168304d3db18cadae65df906ec7bddc279c718b10a8af" exitCode=0 Oct 07 15:07:11 crc kubenswrapper[4672]: I1007 15:07:11.622407 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-h82gn" event={"ID":"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9","Type":"ContainerDied","Data":"55a3b5e6469791eec39168304d3db18cadae65df906ec7bddc279c718b10a8af"} Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.221977 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.273294 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.273541 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="dnsmasq-dns" containerID="cri-o://8bf2cce72a61eaca5fd0e00b7825c6bcfac1370d79d0a5eaf0aecb88980cfce2" gracePeriod=10 Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.632392 4672 generic.go:334] "Generic (PLEG): container finished" podID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerID="8bf2cce72a61eaca5fd0e00b7825c6bcfac1370d79d0a5eaf0aecb88980cfce2" exitCode=0 Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.632492 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" event={"ID":"e16c46b7-1761-476e-abd2-1cee19f91e63","Type":"ContainerDied","Data":"8bf2cce72a61eaca5fd0e00b7825c6bcfac1370d79d0a5eaf0aecb88980cfce2"} Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.728980 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.837142 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t6kk\" (UniqueName: \"kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk\") pod \"e16c46b7-1761-476e-abd2-1cee19f91e63\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.837197 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config\") pod \"e16c46b7-1761-476e-abd2-1cee19f91e63\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.837280 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc\") pod \"e16c46b7-1761-476e-abd2-1cee19f91e63\" (UID: \"e16c46b7-1761-476e-abd2-1cee19f91e63\") " Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.872610 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk" (OuterVolumeSpecName: "kube-api-access-4t6kk") pod "e16c46b7-1761-476e-abd2-1cee19f91e63" (UID: "e16c46b7-1761-476e-abd2-1cee19f91e63"). InnerVolumeSpecName "kube-api-access-4t6kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.939910 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t6kk\" (UniqueName: \"kubernetes.io/projected/e16c46b7-1761-476e-abd2-1cee19f91e63-kube-api-access-4t6kk\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.942348 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e16c46b7-1761-476e-abd2-1cee19f91e63" (UID: "e16c46b7-1761-476e-abd2-1cee19f91e63"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.943979 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config" (OuterVolumeSpecName: "config") pod "e16c46b7-1761-476e-abd2-1cee19f91e63" (UID: "e16c46b7-1761-476e-abd2-1cee19f91e63"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:12 crc kubenswrapper[4672]: I1007 15:07:12.963650 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.028604 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.040601 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjrsp\" (UniqueName: \"kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp\") pod \"f5c70fe0-f6a3-4d99-b481-e252f5f4900f\" (UID: \"f5c70fe0-f6a3-4d99-b481-e252f5f4900f\") " Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.041135 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.041157 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e16c46b7-1761-476e-abd2-1cee19f91e63-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.047196 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp" (OuterVolumeSpecName: "kube-api-access-jjrsp") pod "f5c70fe0-f6a3-4d99-b481-e252f5f4900f" (UID: "f5c70fe0-f6a3-4d99-b481-e252f5f4900f"). InnerVolumeSpecName "kube-api-access-jjrsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.141965 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwqqj\" (UniqueName: \"kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj\") pod \"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9\" (UID: \"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9\") " Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.142571 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjrsp\" (UniqueName: \"kubernetes.io/projected/f5c70fe0-f6a3-4d99-b481-e252f5f4900f-kube-api-access-jjrsp\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.145546 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj" (OuterVolumeSpecName: "kube-api-access-fwqqj") pod "5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" (UID: "5ac1a472-eb46-4de1-9833-3acb1d5ca8b9"). InnerVolumeSpecName "kube-api-access-fwqqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.243975 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwqqj\" (UniqueName: \"kubernetes.io/projected/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9-kube-api-access-fwqqj\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.641000 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8sz2j" event={"ID":"f5c70fe0-f6a3-4d99-b481-e252f5f4900f","Type":"ContainerDied","Data":"24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275"} Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.641085 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24a2785b5af6c221564787ee18a8fca3c694a0135f47468e08d2350c6e0d7275" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.641145 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-8sz2j" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.646530 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" event={"ID":"e16c46b7-1761-476e-abd2-1cee19f91e63","Type":"ContainerDied","Data":"b48aac42d3ff72d69f2a933bcbd308f66d594bf96ed5f4823163b3a33a0885f4"} Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.646589 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b857bcbc9-w2drn" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.646838 4672 scope.go:117] "RemoveContainer" containerID="8bf2cce72a61eaca5fd0e00b7825c6bcfac1370d79d0a5eaf0aecb88980cfce2" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.648496 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-h82gn" event={"ID":"5ac1a472-eb46-4de1-9833-3acb1d5ca8b9","Type":"ContainerDied","Data":"7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6"} Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.648521 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c801918cf5f41df89f60f832e9fe5475a10dfe4246b2ef1442f107d972e53c6" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.648559 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-h82gn" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.683408 4672 scope.go:117] "RemoveContainer" containerID="1e8764d484f9e9ce3249aab067e036d8431fe8363e55c586805e9778e7d2da72" Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.699573 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.707468 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b857bcbc9-w2drn"] Oct 07 15:07:13 crc kubenswrapper[4672]: I1007 15:07:13.903677 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" path="/var/lib/kubelet/pods/e16c46b7-1761-476e-abd2-1cee19f91e63/volumes" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.366164 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-0c0d-account-create-b22tw"] Oct 07 15:07:15 crc kubenswrapper[4672]: E1007 15:07:15.366797 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="init" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.366809 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="init" Oct 07 15:07:15 crc kubenswrapper[4672]: E1007 15:07:15.366826 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.366833 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: E1007 15:07:15.366847 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5c70fe0-f6a3-4d99-b481-e252f5f4900f" containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.366854 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5c70fe0-f6a3-4d99-b481-e252f5f4900f" 
containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: E1007 15:07:15.366864 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="dnsmasq-dns" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.366871 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="dnsmasq-dns" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.367041 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.367060 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16c46b7-1761-476e-abd2-1cee19f91e63" containerName="dnsmasq-dns" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.367068 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5c70fe0-f6a3-4d99-b481-e252f5f4900f" containerName="mariadb-database-create" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.367564 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.370095 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.375534 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0c0d-account-create-b22tw"] Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.479186 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdcv8\" (UniqueName: \"kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8\") pod \"glance-0c0d-account-create-b22tw\" (UID: \"c16c5b8c-8e95-470e-8864-09d7c6af3d07\") " pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.581246 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdcv8\" (UniqueName: \"kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8\") pod \"glance-0c0d-account-create-b22tw\" (UID: \"c16c5b8c-8e95-470e-8864-09d7c6af3d07\") " pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.598681 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdcv8\" (UniqueName: \"kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8\") pod \"glance-0c0d-account-create-b22tw\" (UID: \"c16c5b8c-8e95-470e-8864-09d7c6af3d07\") " pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:15 crc kubenswrapper[4672]: I1007 15:07:15.682710 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.101345 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0c0d-account-create-b22tw"] Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.672941 4672 generic.go:334] "Generic (PLEG): container finished" podID="c16c5b8c-8e95-470e-8864-09d7c6af3d07" containerID="7423cb2a455bbf6eef147f2e0878691720cb299980732775460bfcee254b6b98" exitCode=0 Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.672990 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0c0d-account-create-b22tw" event={"ID":"c16c5b8c-8e95-470e-8864-09d7c6af3d07","Type":"ContainerDied","Data":"7423cb2a455bbf6eef147f2e0878691720cb299980732775460bfcee254b6b98"} Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.673332 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0c0d-account-create-b22tw" event={"ID":"c16c5b8c-8e95-470e-8864-09d7c6af3d07","Type":"ContainerStarted","Data":"424ada7350116c3b54eee089cce77e3d97977d9e302cae1de3204703835c628d"} Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.675726 4672 generic.go:334] "Generic (PLEG): container finished" podID="a57e275b-ff63-4284-aae2-7fbc858c0128" containerID="a3b4a4a3662ce3e7bd011cc0d04bc91aa4c63a914727fa606d835ea3f8fff68d" exitCode=0 Oct 07 15:07:16 crc kubenswrapper[4672]: I1007 15:07:16.675770 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r4b6l" event={"ID":"a57e275b-ff63-4284-aae2-7fbc858c0128","Type":"ContainerDied","Data":"a3b4a4a3662ce3e7bd011cc0d04bc91aa4c63a914727fa606d835ea3f8fff68d"} Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.037400 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.042677 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120149 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120244 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdcv8\" (UniqueName: \"kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8\") pod \"c16c5b8c-8e95-470e-8864-09d7c6af3d07\" (UID: \"c16c5b8c-8e95-470e-8864-09d7c6af3d07\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120316 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120343 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120361 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120415 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120568 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.120764 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjlhd\" (UniqueName: \"kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd\") pod \"a57e275b-ff63-4284-aae2-7fbc858c0128\" (UID: \"a57e275b-ff63-4284-aae2-7fbc858c0128\") " Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.121030 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.121562 4672 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.121741 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.125867 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8" (OuterVolumeSpecName: "kube-api-access-mdcv8") pod "c16c5b8c-8e95-470e-8864-09d7c6af3d07" (UID: "c16c5b8c-8e95-470e-8864-09d7c6af3d07"). InnerVolumeSpecName "kube-api-access-mdcv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.126001 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd" (OuterVolumeSpecName: "kube-api-access-gjlhd") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "kube-api-access-gjlhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.128420 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.141365 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts" (OuterVolumeSpecName: "scripts") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.142553 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.144554 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a57e275b-ff63-4284-aae2-7fbc858c0128" (UID: "a57e275b-ff63-4284-aae2-7fbc858c0128"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222906 4672 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222935 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a57e275b-ff63-4284-aae2-7fbc858c0128-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222945 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjlhd\" (UniqueName: \"kubernetes.io/projected/a57e275b-ff63-4284-aae2-7fbc858c0128-kube-api-access-gjlhd\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222957 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222966 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdcv8\" (UniqueName: \"kubernetes.io/projected/c16c5b8c-8e95-470e-8864-09d7c6af3d07-kube-api-access-mdcv8\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222975 4672 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a57e275b-ff63-4284-aae2-7fbc858c0128-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.222983 4672 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a57e275b-ff63-4284-aae2-7fbc858c0128-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.693947 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r4b6l" event={"ID":"a57e275b-ff63-4284-aae2-7fbc858c0128","Type":"ContainerDied","Data":"f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17"} Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.693969 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r4b6l" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.693990 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f26e57580e84e7f8e322dcb34408cf11c6a1f2ccd063d9f060e2be871b0a6c17" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.695417 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0c0d-account-create-b22tw" event={"ID":"c16c5b8c-8e95-470e-8864-09d7c6af3d07","Type":"ContainerDied","Data":"424ada7350116c3b54eee089cce77e3d97977d9e302cae1de3204703835c628d"} Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.695571 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="424ada7350116c3b54eee089cce77e3d97977d9e302cae1de3204703835c628d" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.695470 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0c0d-account-create-b22tw" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.933355 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.938416 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/99812f34-1d2c-4f0f-bf07-9569fde6d437-etc-swift\") pod \"swift-storage-0\" (UID: \"99812f34-1d2c-4f0f-bf07-9569fde6d437\") " pod="openstack/swift-storage-0" Oct 07 15:07:18 crc kubenswrapper[4672]: I1007 15:07:18.971387 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.452460 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.704999 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"5f1e7f810e832049ff0567881f43d0cac228bd9162c9a1431456a9053a38f2e7"} Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.866174 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c94d-account-create-lr26x"] Oct 07 15:07:19 crc kubenswrapper[4672]: E1007 15:07:19.866857 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a57e275b-ff63-4284-aae2-7fbc858c0128" containerName="swift-ring-rebalance" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.866879 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a57e275b-ff63-4284-aae2-7fbc858c0128" containerName="swift-ring-rebalance" Oct 07 15:07:19 crc kubenswrapper[4672]: E1007 15:07:19.866897 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c16c5b8c-8e95-470e-8864-09d7c6af3d07" containerName="mariadb-account-create" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.866905 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c16c5b8c-8e95-470e-8864-09d7c6af3d07" containerName="mariadb-account-create" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.867118 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a57e275b-ff63-4284-aae2-7fbc858c0128" containerName="swift-ring-rebalance" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.867152 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c16c5b8c-8e95-470e-8864-09d7c6af3d07" containerName="mariadb-account-create" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.867756 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.870580 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.878429 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c94d-account-create-lr26x"] Oct 07 15:07:19 crc kubenswrapper[4672]: I1007 15:07:19.951778 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtgn4\" (UniqueName: \"kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4\") pod \"keystone-c94d-account-create-lr26x\" (UID: \"6552b769-4ac3-4f44-a977-6b7ccafe7df6\") " pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.053614 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtgn4\" (UniqueName: \"kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4\") pod \"keystone-c94d-account-create-lr26x\" (UID: \"6552b769-4ac3-4f44-a977-6b7ccafe7df6\") " pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.074190 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtgn4\" (UniqueName: \"kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4\") pod \"keystone-c94d-account-create-lr26x\" (UID: \"6552b769-4ac3-4f44-a977-6b7ccafe7df6\") " pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.198295 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.269079 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e9f3-account-create-xrvl5"] Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.271674 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.273865 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.302827 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e9f3-account-create-xrvl5"] Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.359466 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glgj9\" (UniqueName: \"kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9\") pod \"placement-e9f3-account-create-xrvl5\" (UID: \"4f7b5c37-6793-455f-a313-0e620aac3401\") " pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.460958 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glgj9\" (UniqueName: \"kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9\") pod \"placement-e9f3-account-create-xrvl5\" (UID: \"4f7b5c37-6793-455f-a313-0e620aac3401\") " pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.478883 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glgj9\" (UniqueName: \"kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9\") pod \"placement-e9f3-account-create-xrvl5\" (UID: \"4f7b5c37-6793-455f-a313-0e620aac3401\") " pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.586243 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-4bcjh"] Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.588620 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.590615 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-x7z9z" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.593597 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4bcjh"] Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.597415 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.659549 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.660981 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c94d-account-create-lr26x"] Oct 07 15:07:20 crc kubenswrapper[4672]: W1007 15:07:20.662061 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6552b769_4ac3_4f44_a977_6b7ccafe7df6.slice/crio-c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35 WatchSource:0}: Error finding container c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35: Status 404 returned error can't find the container with id c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35 Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.665301 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.665356 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn47p\" (UniqueName: \"kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.665380 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.665402 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.716021 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c94d-account-create-lr26x" event={"ID":"6552b769-4ac3-4f44-a977-6b7ccafe7df6","Type":"ContainerStarted","Data":"c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35"} Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.719486 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"ad015df21be33779030bcdb6867fbfa3d53d1f43bccd913b7826e6d5e5f035e3"} Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.719608 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"647103fcc3bf910a23720a7d76d05895b4c56b42e8cf3371b60424d431fcb53a"} Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.768302 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data\") pod 
\"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.768410 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn47p\" (UniqueName: \"kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.768441 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.768468 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.777129 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.778746 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.779104 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.791737 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn47p\" (UniqueName: \"kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p\") pod \"glance-db-sync-4bcjh\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:20 crc kubenswrapper[4672]: I1007 15:07:20.908970 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.093634 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-tzsfl" podUID="0e4d1227-361f-4c04-b0ce-12295f021364" containerName="ovn-controller" probeResult="failure" output=< Oct 07 15:07:21 crc kubenswrapper[4672]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 07 15:07:21 crc kubenswrapper[4672]: > Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.100624 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.114230 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5p5sl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.167088 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e9f3-account-create-xrvl5"] Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.332775 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-tzsfl-config-r9lxl"] Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.335923 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.337711 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.355423 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-tzsfl-config-r9lxl"] Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492494 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492561 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492594 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492612 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492699 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.492739 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsb94\" (UniqueName: \"kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.522812 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4bcjh"] Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594008 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594093 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsb94\" (UniqueName: \"kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594469 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594474 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594498 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594759 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " 
pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.594918 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.595069 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.595706 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.597631 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.615114 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsb94\" (UniqueName: \"kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94\") pod \"ovn-controller-tzsfl-config-r9lxl\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: W1007 15:07:21.650605 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3158784_bab1_4e87_957c_cce0a7180f6f.slice/crio-9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954 WatchSource:0}: Error finding container 9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954: Status 404 returned error can't find the container with id 9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954 Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.668369 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.740825 4672 generic.go:334] "Generic (PLEG): container finished" podID="4f7b5c37-6793-455f-a313-0e620aac3401" containerID="64abc7d214a6cc5bb62915bec45e54fd620d629947970f50dcabd90db3ee7721" exitCode=0 Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.740942 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f3-account-create-xrvl5" event={"ID":"4f7b5c37-6793-455f-a313-0e620aac3401","Type":"ContainerDied","Data":"64abc7d214a6cc5bb62915bec45e54fd620d629947970f50dcabd90db3ee7721"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.740997 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f3-account-create-xrvl5" event={"ID":"4f7b5c37-6793-455f-a313-0e620aac3401","Type":"ContainerStarted","Data":"4224290e286aaf5fcc88903d85fb02e7e1858417c8ccc47e264a86e2d6a224aa"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.749547 4672 generic.go:334] "Generic (PLEG): container finished" podID="6552b769-4ac3-4f44-a977-6b7ccafe7df6" containerID="7d0553f00eefaa6c52e0d01829263676537f3e46f440ed86ba734093b2bb0c78" exitCode=0 Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.749590 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c94d-account-create-lr26x" event={"ID":"6552b769-4ac3-4f44-a977-6b7ccafe7df6","Type":"ContainerDied","Data":"7d0553f00eefaa6c52e0d01829263676537f3e46f440ed86ba734093b2bb0c78"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.751803 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4bcjh" event={"ID":"c3158784-bab1-4e87-957c-cce0a7180f6f","Type":"ContainerStarted","Data":"9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.764883 4672 generic.go:334] "Generic (PLEG): container finished" podID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerID="00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9" exitCode=0 Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.765010 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerDied","Data":"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.767337 4672 generic.go:334] "Generic (PLEG): container finished" podID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerID="35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9" exitCode=0 Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.767416 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerDied","Data":"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.775790 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"3c992c68a9b7bae061bb18ad57fd2e0a5f95011e2907e3e21d80242c67f4284c"} Oct 07 15:07:21 crc kubenswrapper[4672]: I1007 15:07:21.775853 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"8d9a25e927accdfb26649dc604dcc3572505d567f62e2344d57fcac2578a87ba"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.315226 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-tzsfl-config-r9lxl"] Oct 07 15:07:22 crc kubenswrapper[4672]: W1007 15:07:22.320060 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96a383c6_b20c_4937_8b67_34bb7cf75833.slice/crio-36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7 WatchSource:0}: Error finding container 36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7: Status 404 returned error can't find the container with id 36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7 Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.784532 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerStarted","Data":"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.784749 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.795755 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"51666b5162d50c0428e2728da29d563c3eb627a976fe62ea4b34cdc4240bc357"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.795807 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"ef8b1e43af8363fb01199cd1d39aeb73e893614ad100677fd591616a46c3ce89"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.795822 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"5679c47dd79e9e1e03f0c5f5ef72ac7372640e413f718825aa54c15794f34883"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.795833 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"d2ee166feefbefb883bc9d754e0ab6f373f61cd0a52f55681bb8227db92fb9f4"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.797885 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl-config-r9lxl" event={"ID":"96a383c6-b20c-4937-8b67-34bb7cf75833","Type":"ContainerStarted","Data":"7b52ee40d1b537c62ddfe2baf9d73827602317856984abd144d04875a6ab0911"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.797938 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl-config-r9lxl" event={"ID":"96a383c6-b20c-4937-8b67-34bb7cf75833","Type":"ContainerStarted","Data":"36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.801837 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerStarted","Data":"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723"} Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.802851 
4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.835262 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.302517454 podStartE2EDuration="57.835236325s" podCreationTimestamp="2025-10-07 15:06:25 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.374113744 +0000 UTC m=+1076.349292325" lastFinishedPulling="2025-10-07 15:06:46.906832615 +0000 UTC m=+1083.882011196" observedRunningTime="2025-10-07 15:07:22.816786235 +0000 UTC m=+1119.791964826" watchObservedRunningTime="2025-10-07 15:07:22.835236325 +0000 UTC m=+1119.810414906" Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.839997 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-tzsfl-config-r9lxl" podStartSLOduration=1.839977231 podStartE2EDuration="1.839977231s" podCreationTimestamp="2025-10-07 15:07:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:22.834684179 +0000 UTC m=+1119.809862780" watchObservedRunningTime="2025-10-07 15:07:22.839977231 +0000 UTC m=+1119.815155812" Oct 07 15:07:22 crc kubenswrapper[4672]: I1007 15:07:22.863821 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=51.22748108 podStartE2EDuration="57.863801365s" podCreationTimestamp="2025-10-07 15:06:25 +0000 UTC" firstStartedPulling="2025-10-07 15:06:39.648252768 +0000 UTC m=+1076.623431349" lastFinishedPulling="2025-10-07 15:06:46.284573053 +0000 UTC m=+1083.259751634" observedRunningTime="2025-10-07 15:07:22.856555867 +0000 UTC m=+1119.831734458" watchObservedRunningTime="2025-10-07 15:07:22.863801365 +0000 UTC m=+1119.838979946" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.501804 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.547855 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.631463 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtgn4\" (UniqueName: \"kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4\") pod \"6552b769-4ac3-4f44-a977-6b7ccafe7df6\" (UID: \"6552b769-4ac3-4f44-a977-6b7ccafe7df6\") " Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.631986 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glgj9\" (UniqueName: \"kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9\") pod \"4f7b5c37-6793-455f-a313-0e620aac3401\" (UID: \"4f7b5c37-6793-455f-a313-0e620aac3401\") " Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.638308 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4" (OuterVolumeSpecName: "kube-api-access-vtgn4") pod "6552b769-4ac3-4f44-a977-6b7ccafe7df6" (UID: "6552b769-4ac3-4f44-a977-6b7ccafe7df6"). InnerVolumeSpecName "kube-api-access-vtgn4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.638853 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9" (OuterVolumeSpecName: "kube-api-access-glgj9") pod "4f7b5c37-6793-455f-a313-0e620aac3401" (UID: "4f7b5c37-6793-455f-a313-0e620aac3401"). InnerVolumeSpecName "kube-api-access-glgj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.733853 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glgj9\" (UniqueName: \"kubernetes.io/projected/4f7b5c37-6793-455f-a313-0e620aac3401-kube-api-access-glgj9\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.733883 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtgn4\" (UniqueName: \"kubernetes.io/projected/6552b769-4ac3-4f44-a977-6b7ccafe7df6-kube-api-access-vtgn4\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.828892 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"6a10a978d468a28fc8573db08e55c32c69cc7628f873762996efb5e1d48f213e"} Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.828935 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"9ae065c829dd86f45fd3fdde4508c19bc1fe3b6c508fb5f88fd58dc3830ac080"} Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.832007 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e9f3-account-create-xrvl5" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.832081 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e9f3-account-create-xrvl5" event={"ID":"4f7b5c37-6793-455f-a313-0e620aac3401","Type":"ContainerDied","Data":"4224290e286aaf5fcc88903d85fb02e7e1858417c8ccc47e264a86e2d6a224aa"} Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.832124 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4224290e286aaf5fcc88903d85fb02e7e1858417c8ccc47e264a86e2d6a224aa" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.835629 4672 generic.go:334] "Generic (PLEG): container finished" podID="96a383c6-b20c-4937-8b67-34bb7cf75833" containerID="7b52ee40d1b537c62ddfe2baf9d73827602317856984abd144d04875a6ab0911" exitCode=0 Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.835693 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl-config-r9lxl" event={"ID":"96a383c6-b20c-4937-8b67-34bb7cf75833","Type":"ContainerDied","Data":"7b52ee40d1b537c62ddfe2baf9d73827602317856984abd144d04875a6ab0911"} Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.843850 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c94d-account-create-lr26x" event={"ID":"6552b769-4ac3-4f44-a977-6b7ccafe7df6","Type":"ContainerDied","Data":"c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35"} Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.843903 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c92555903cbc6801da77e7698b327d50060b5452ed19ee0524fb0b095e227d35" Oct 07 15:07:23 crc kubenswrapper[4672]: I1007 15:07:23.845042 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c94d-account-create-lr26x" Oct 07 15:07:24 crc kubenswrapper[4672]: I1007 15:07:24.859768 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"b84cecd3066e7a6a03ba886838d6ec1a6efddbeefcf712de48c07d719c29030e"} Oct 07 15:07:24 crc kubenswrapper[4672]: I1007 15:07:24.860097 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"142fcb51fef0e43084af1498866ecb775d3d4105b1605556a8572510e7fe448f"} Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.227188 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267413 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267607 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267641 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267660 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267713 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsb94\" (UniqueName: \"kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.267775 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run\") pod \"96a383c6-b20c-4937-8b67-34bb7cf75833\" (UID: \"96a383c6-b20c-4937-8b67-34bb7cf75833\") " Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.268115 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run" (OuterVolumeSpecName: "var-run") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.268549 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.268793 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.268900 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.273223 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts" (OuterVolumeSpecName: "scripts") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.293810 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94" (OuterVolumeSpecName: "kube-api-access-jsb94") pod "96a383c6-b20c-4937-8b67-34bb7cf75833" (UID: "96a383c6-b20c-4937-8b67-34bb7cf75833"). InnerVolumeSpecName "kube-api-access-jsb94". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368718 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsb94\" (UniqueName: \"kubernetes.io/projected/96a383c6-b20c-4937-8b67-34bb7cf75833-kube-api-access-jsb94\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368805 4672 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368818 4672 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368829 4672 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/96a383c6-b20c-4937-8b67-34bb7cf75833-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368840 4672 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.368850 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96a383c6-b20c-4937-8b67-34bb7cf75833-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.879764 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-tzsfl-config-r9lxl" event={"ID":"96a383c6-b20c-4937-8b67-34bb7cf75833","Type":"ContainerDied","Data":"36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7"} Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.879812 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36c760f35aa55c578637dcba46a5c91f7f625854478cfff93db6ede5db6789f7" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 
15:07:25.879881 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-tzsfl-config-r9lxl" Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.957078 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-tzsfl-config-r9lxl"] Oct 07 15:07:25 crc kubenswrapper[4672]: I1007 15:07:25.960882 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-tzsfl-config-r9lxl"] Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.072823 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-tzsfl" Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.655313 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.655657 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.895104 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"bd1694be42f68914d8f24a316e6107b8898bc56252e090690d5a08adfe679c2d"} Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.895148 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"13c30227bbfa4ed854ca8c94e262e025463b97a700a0a545a254cb704c86c190"} Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.895162 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"99812f34-1d2c-4f0f-bf07-9569fde6d437","Type":"ContainerStarted","Data":"3c9a1a9802749f6bf69a3e398d8e60b3e8b3fbc4669c148ad91ee76853f7cde8"} Oct 07 15:07:26 crc kubenswrapper[4672]: I1007 15:07:26.935322 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.93085605 podStartE2EDuration="24.935305524s" podCreationTimestamp="2025-10-07 15:07:02 +0000 UTC" firstStartedPulling="2025-10-07 15:07:19.462792182 +0000 UTC m=+1116.437970763" lastFinishedPulling="2025-10-07 15:07:23.467241656 +0000 UTC m=+1120.442420237" observedRunningTime="2025-10-07 15:07:26.934993215 +0000 UTC m=+1123.910171796" watchObservedRunningTime="2025-10-07 15:07:26.935305524 +0000 UTC m=+1123.910484105" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.267113 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:07:27 crc kubenswrapper[4672]: E1007 15:07:27.279617 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a383c6-b20c-4937-8b67-34bb7cf75833" containerName="ovn-config" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.279662 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a383c6-b20c-4937-8b67-34bb7cf75833" containerName="ovn-config" Oct 07 15:07:27 crc kubenswrapper[4672]: E1007 15:07:27.279692 4672 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7b5c37-6793-455f-a313-0e620aac3401" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.279702 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7b5c37-6793-455f-a313-0e620aac3401" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: E1007 15:07:27.279740 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6552b769-4ac3-4f44-a977-6b7ccafe7df6" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.279748 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="6552b769-4ac3-4f44-a977-6b7ccafe7df6" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.280263 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="6552b769-4ac3-4f44-a977-6b7ccafe7df6" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.280337 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7b5c37-6793-455f-a313-0e620aac3401" containerName="mariadb-account-create" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.280372 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a383c6-b20c-4937-8b67-34bb7cf75833" containerName="ovn-config" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.281571 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.285309 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.298432 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.407613 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.407661 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.407887 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.407973 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 
15:07:27.408028 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.408179 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j97qb\" (UniqueName: \"kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509424 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509493 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509524 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509600 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j97qb\" (UniqueName: \"kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509637 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.509665 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.510525 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.510593 
4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.510878 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.511138 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.514585 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.550404 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j97qb\" (UniqueName: \"kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb\") pod \"dnsmasq-dns-5988746689-84wxb\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.601533 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:27 crc kubenswrapper[4672]: I1007 15:07:27.907967 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96a383c6-b20c-4937-8b67-34bb7cf75833" path="/var/lib/kubelet/pods/96a383c6-b20c-4937-8b67-34bb7cf75833/volumes" Oct 07 15:07:28 crc kubenswrapper[4672]: I1007 15:07:28.114372 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:07:28 crc kubenswrapper[4672]: I1007 15:07:28.918779 4672 generic.go:334] "Generic (PLEG): container finished" podID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerID="f3c277ade9b0539e7eda32fb469869489269533924912e570901900e27e6c93d" exitCode=0 Oct 07 15:07:28 crc kubenswrapper[4672]: I1007 15:07:28.918838 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5988746689-84wxb" event={"ID":"de394e3e-48ba-4410-8fb2-a99e03f27ac9","Type":"ContainerDied","Data":"f3c277ade9b0539e7eda32fb469869489269533924912e570901900e27e6c93d"} Oct 07 15:07:28 crc kubenswrapper[4672]: I1007 15:07:28.918869 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5988746689-84wxb" event={"ID":"de394e3e-48ba-4410-8fb2-a99e03f27ac9","Type":"ContainerStarted","Data":"ff5d04f4db83136ce8b5d73fdba4fa57015b50dd3b30279a9e2065eac657d1b8"} Oct 07 15:07:35 crc kubenswrapper[4672]: I1007 15:07:35.986938 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5988746689-84wxb" event={"ID":"de394e3e-48ba-4410-8fb2-a99e03f27ac9","Type":"ContainerStarted","Data":"722775be4f4cf597f13cd7fe6551e06a16fb3d5f711ca723258e94584b9d39ee"} Oct 07 15:07:35 crc kubenswrapper[4672]: I1007 15:07:35.988223 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:07:36 crc kubenswrapper[4672]: I1007 15:07:36.007850 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5988746689-84wxb" podStartSLOduration=9.007832142 podStartE2EDuration="9.007832142s" podCreationTimestamp="2025-10-07 15:07:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:36.00742595 +0000 UTC m=+1132.982604531" watchObservedRunningTime="2025-10-07 15:07:36.007832142 +0000 UTC m=+1132.983010723" Oct 07 15:07:36 crc kubenswrapper[4672]: I1007 15:07:36.994821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4bcjh" event={"ID":"c3158784-bab1-4e87-957c-cce0a7180f6f","Type":"ContainerStarted","Data":"cb3935663bce77023993755d95c01c007a5b46217b9dcbf39d52c4afca178b15"} Oct 07 15:07:37 crc kubenswrapper[4672]: I1007 15:07:37.016208 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-4bcjh" podStartSLOduration=3.031560204 podStartE2EDuration="17.016192714s" podCreationTimestamp="2025-10-07 15:07:20 +0000 UTC" firstStartedPulling="2025-10-07 15:07:21.653671528 +0000 UTC m=+1118.628850109" lastFinishedPulling="2025-10-07 15:07:35.638304038 +0000 UTC m=+1132.613482619" observedRunningTime="2025-10-07 15:07:37.014569127 +0000 UTC m=+1133.989747718" watchObservedRunningTime="2025-10-07 15:07:37.016192714 +0000 UTC m=+1133.991371295" Oct 07 15:07:37 crc kubenswrapper[4672]: I1007 15:07:37.029206 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:07:37 crc 
Oct 07 15:07:37 crc kubenswrapper[4672]: I1007 15:07:37.078245 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.817841 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6fl7p"]
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.820346 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6fl7p"
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.832355 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6fl7p"]
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.900427 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8jmd\" (UniqueName: \"kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd\") pod \"cinder-db-create-6fl7p\" (UID: \"303b2f6b-3e7b-4f25-b480-e2a74863215a\") " pod="openstack/cinder-db-create-6fl7p"
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.901284 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-8l95r"]
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.902617 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8l95r"
Oct 07 15:07:38 crc kubenswrapper[4672]: I1007 15:07:38.919766 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8l95r"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.001738 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8jmd\" (UniqueName: \"kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd\") pod \"cinder-db-create-6fl7p\" (UID: \"303b2f6b-3e7b-4f25-b480-e2a74863215a\") " pod="openstack/cinder-db-create-6fl7p"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.001984 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnlhl\" (UniqueName: \"kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl\") pod \"barbican-db-create-8l95r\" (UID: \"5d130321-61d8-41eb-972e-c0a9b7eed5e2\") " pod="openstack/barbican-db-create-8l95r"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.030548 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8jmd\" (UniqueName: \"kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd\") pod \"cinder-db-create-6fl7p\" (UID: \"303b2f6b-3e7b-4f25-b480-e2a74863215a\") " pod="openstack/cinder-db-create-6fl7p"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.104453 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnlhl\" (UniqueName: \"kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl\") pod \"barbican-db-create-8l95r\" (UID: \"5d130321-61d8-41eb-972e-c0a9b7eed5e2\") " pod="openstack/barbican-db-create-8l95r"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.125550 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnlhl\" (UniqueName: \"kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl\") pod \"barbican-db-create-8l95r\" (UID: \"5d130321-61d8-41eb-972e-c0a9b7eed5e2\") " pod="openstack/barbican-db-create-8l95r"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.140219 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6fl7p"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.207954 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-nvjxx"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.209604 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nvjxx"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.217350 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8l95r"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.231589 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nvjxx"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.284807 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qw6h4"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.287454 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.291440 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.291637 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rtlqr"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.291738 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.291937 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.305726 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qw6h4"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.307104 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdrcr\" (UniqueName: \"kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr\") pod \"neutron-db-create-nvjxx\" (UID: \"4272a5a5-5cd2-4045-a1d7-735f5d8e3479\") " pod="openstack/neutron-db-create-nvjxx"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.408783 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdrcr\" (UniqueName: \"kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr\") pod \"neutron-db-create-nvjxx\" (UID: \"4272a5a5-5cd2-4045-a1d7-735f5d8e3479\") " pod="openstack/neutron-db-create-nvjxx"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.408932 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.408967 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.408993 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxdcv\" (UniqueName: \"kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.432983 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdrcr\" (UniqueName: \"kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr\") pod \"neutron-db-create-nvjxx\" (UID: \"4272a5a5-5cd2-4045-a1d7-735f5d8e3479\") " pod="openstack/neutron-db-create-nvjxx"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.510537 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.510589 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.510615 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxdcv\" (UniqueName: \"kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.514128 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.514781 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.527115 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxdcv\" (UniqueName: \"kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv\") pod \"keystone-db-sync-qw6h4\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.534247 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nvjxx"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.617963 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qw6h4"
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.664448 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6fl7p"]
Oct 07 15:07:39 crc kubenswrapper[4672]: W1007 15:07:39.677905 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod303b2f6b_3e7b_4f25_b480_e2a74863215a.slice/crio-f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd WatchSource:0}: Error finding container f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd: Status 404 returned error can't find the container with id f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.786217 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8l95r"]
Oct 07 15:07:39 crc kubenswrapper[4672]: I1007 15:07:39.842029 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nvjxx"]
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.001477 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qw6h4"]
Oct 07 15:07:40 crc kubenswrapper[4672]: W1007 15:07:40.004399 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc36d7f75_c0e2_4884_9556_e2578247b813.slice/crio-f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832 WatchSource:0}: Error finding container f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832: Status 404 returned error can't find the container with id f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.017393 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nvjxx" event={"ID":"4272a5a5-5cd2-4045-a1d7-735f5d8e3479","Type":"ContainerStarted","Data":"1967e6c443cf9821119746714fe41ace285f68e3e3a50e7f3a5014ba58d9984c"}
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.018635 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8l95r" event={"ID":"5d130321-61d8-41eb-972e-c0a9b7eed5e2","Type":"ContainerStarted","Data":"09eac7f9b4d94358e2e48813a15621814aba85bef3e32431335b5b9f17eb91f7"}
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.020379 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qw6h4" event={"ID":"c36d7f75-c0e2-4884-9556-e2578247b813","Type":"ContainerStarted","Data":"f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832"}
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.021598 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6fl7p" event={"ID":"303b2f6b-3e7b-4f25-b480-e2a74863215a","Type":"ContainerStarted","Data":"f8b60223341c2dfe46e0a38bed3d5f53a96c67ebbdbc8548a38de564f9d625aa"}
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.021624 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6fl7p" event={"ID":"303b2f6b-3e7b-4f25-b480-e2a74863215a","Type":"ContainerStarted","Data":"f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd"}
Oct 07 15:07:40 crc kubenswrapper[4672]: I1007 15:07:40.037390 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-6fl7p" podStartSLOduration=2.037374466 podStartE2EDuration="2.037374466s" podCreationTimestamp="2025-10-07 15:07:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:40.034443232 +0000 UTC m=+1137.009621813" watchObservedRunningTime="2025-10-07 15:07:40.037374466 +0000 UTC m=+1137.012553037"
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.036479 4672 generic.go:334] "Generic (PLEG): container finished" podID="4272a5a5-5cd2-4045-a1d7-735f5d8e3479" containerID="3e6b9268b83fa2d07c62ead3ad7808f8b1939e3e2a2460afe736e72e3c3cd463" exitCode=0
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.036554 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nvjxx" event={"ID":"4272a5a5-5cd2-4045-a1d7-735f5d8e3479","Type":"ContainerDied","Data":"3e6b9268b83fa2d07c62ead3ad7808f8b1939e3e2a2460afe736e72e3c3cd463"}
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.060267 4672 generic.go:334] "Generic (PLEG): container finished" podID="5d130321-61d8-41eb-972e-c0a9b7eed5e2" containerID="8ff9979e84c7327d81511b9865c08b4f9a7cadf02c1ae8e913ee3b5a9e999912" exitCode=0
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.060376 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8l95r" event={"ID":"5d130321-61d8-41eb-972e-c0a9b7eed5e2","Type":"ContainerDied","Data":"8ff9979e84c7327d81511b9865c08b4f9a7cadf02c1ae8e913ee3b5a9e999912"}
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.067936 4672 generic.go:334] "Generic (PLEG): container finished" podID="303b2f6b-3e7b-4f25-b480-e2a74863215a" containerID="f8b60223341c2dfe46e0a38bed3d5f53a96c67ebbdbc8548a38de564f9d625aa" exitCode=0
Oct 07 15:07:41 crc kubenswrapper[4672]: I1007 15:07:41.068308 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6fl7p" event={"ID":"303b2f6b-3e7b-4f25-b480-e2a74863215a","Type":"ContainerDied","Data":"f8b60223341c2dfe46e0a38bed3d5f53a96c67ebbdbc8548a38de564f9d625aa"}
Oct 07 15:07:42 crc kubenswrapper[4672]: I1007 15:07:42.604300 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5988746689-84wxb"
Oct 07 15:07:42 crc kubenswrapper[4672]: I1007 15:07:42.656156 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"]
Oct 07 15:07:42 crc kubenswrapper[4672]: I1007 15:07:42.656442 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="dnsmasq-dns" containerID="cri-o://f34669468ae74494b5600e4486e7a3db912679745d16d58e1dca44aaf9ca8ed8" gracePeriod=10
Oct 07 15:07:43 crc kubenswrapper[4672]: I1007 15:07:43.087984 4672 generic.go:334] "Generic (PLEG): container finished" podID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerID="f34669468ae74494b5600e4486e7a3db912679745d16d58e1dca44aaf9ca8ed8" exitCode=0
Oct 07 15:07:43 crc kubenswrapper[4672]: I1007 15:07:43.088043 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" event={"ID":"58bd9bee-ec13-4d46-9a0a-2e94629ac059","Type":"ContainerDied","Data":"f34669468ae74494b5600e4486e7a3db912679745d16d58e1dca44aaf9ca8ed8"}
Need to start a new one" pod="openstack/barbican-db-create-8l95r" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.491057 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nvjxx" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.504032 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6fl7p" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.518104 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdrcr\" (UniqueName: \"kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr\") pod \"4272a5a5-5cd2-4045-a1d7-735f5d8e3479\" (UID: \"4272a5a5-5cd2-4045-a1d7-735f5d8e3479\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.518377 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnlhl\" (UniqueName: \"kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl\") pod \"5d130321-61d8-41eb-972e-c0a9b7eed5e2\" (UID: \"5d130321-61d8-41eb-972e-c0a9b7eed5e2\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.527644 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl" (OuterVolumeSpecName: "kube-api-access-wnlhl") pod "5d130321-61d8-41eb-972e-c0a9b7eed5e2" (UID: "5d130321-61d8-41eb-972e-c0a9b7eed5e2"). InnerVolumeSpecName "kube-api-access-wnlhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.530169 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr" (OuterVolumeSpecName: "kube-api-access-fdrcr") pod "4272a5a5-5cd2-4045-a1d7-735f5d8e3479" (UID: "4272a5a5-5cd2-4045-a1d7-735f5d8e3479"). InnerVolumeSpecName "kube-api-access-fdrcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.623631 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8jmd\" (UniqueName: \"kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd\") pod \"303b2f6b-3e7b-4f25-b480-e2a74863215a\" (UID: \"303b2f6b-3e7b-4f25-b480-e2a74863215a\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.624058 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnlhl\" (UniqueName: \"kubernetes.io/projected/5d130321-61d8-41eb-972e-c0a9b7eed5e2-kube-api-access-wnlhl\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.624070 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdrcr\" (UniqueName: \"kubernetes.io/projected/4272a5a5-5cd2-4045-a1d7-735f5d8e3479-kube-api-access-fdrcr\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.630524 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd" (OuterVolumeSpecName: "kube-api-access-z8jmd") pod "303b2f6b-3e7b-4f25-b480-e2a74863215a" (UID: "303b2f6b-3e7b-4f25-b480-e2a74863215a"). InnerVolumeSpecName "kube-api-access-z8jmd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.681217 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.725400 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xccs9\" (UniqueName: \"kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9\") pod \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.725883 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc\") pod \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.726192 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb\") pod \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.726379 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config\") pod \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.726580 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb\") pod \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\" (UID: \"58bd9bee-ec13-4d46-9a0a-2e94629ac059\") " Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.727201 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8jmd\" (UniqueName: \"kubernetes.io/projected/303b2f6b-3e7b-4f25-b480-e2a74863215a-kube-api-access-z8jmd\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.735954 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9" (OuterVolumeSpecName: "kube-api-access-xccs9") pod "58bd9bee-ec13-4d46-9a0a-2e94629ac059" (UID: "58bd9bee-ec13-4d46-9a0a-2e94629ac059"). InnerVolumeSpecName "kube-api-access-xccs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.769588 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "58bd9bee-ec13-4d46-9a0a-2e94629ac059" (UID: "58bd9bee-ec13-4d46-9a0a-2e94629ac059"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.771601 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "58bd9bee-ec13-4d46-9a0a-2e94629ac059" (UID: "58bd9bee-ec13-4d46-9a0a-2e94629ac059"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.775162 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "58bd9bee-ec13-4d46-9a0a-2e94629ac059" (UID: "58bd9bee-ec13-4d46-9a0a-2e94629ac059"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.785225 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config" (OuterVolumeSpecName: "config") pod "58bd9bee-ec13-4d46-9a0a-2e94629ac059" (UID: "58bd9bee-ec13-4d46-9a0a-2e94629ac059"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.829084 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.829134 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.829149 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.829160 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/58bd9bee-ec13-4d46-9a0a-2e94629ac059-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:44 crc kubenswrapper[4672]: I1007 15:07:44.829176 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xccs9\" (UniqueName: \"kubernetes.io/projected/58bd9bee-ec13-4d46-9a0a-2e94629ac059-kube-api-access-xccs9\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.106192 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6fl7p" event={"ID":"303b2f6b-3e7b-4f25-b480-e2a74863215a","Type":"ContainerDied","Data":"f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd"} Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.106254 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f376e667890422002d970e6e0d6f260bc63b50bd5fe1bd43fc199dc45a1ce8bd" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.106213 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6fl7p" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.108308 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nvjxx" event={"ID":"4272a5a5-5cd2-4045-a1d7-735f5d8e3479","Type":"ContainerDied","Data":"1967e6c443cf9821119746714fe41ace285f68e3e3a50e7f3a5014ba58d9984c"} Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.108343 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1967e6c443cf9821119746714fe41ace285f68e3e3a50e7f3a5014ba58d9984c" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.108406 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nvjxx" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.112857 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8l95r" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.112898 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8l95r" event={"ID":"5d130321-61d8-41eb-972e-c0a9b7eed5e2","Type":"ContainerDied","Data":"09eac7f9b4d94358e2e48813a15621814aba85bef3e32431335b5b9f17eb91f7"} Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.112999 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09eac7f9b4d94358e2e48813a15621814aba85bef3e32431335b5b9f17eb91f7" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.116952 4672 generic.go:334] "Generic (PLEG): container finished" podID="c3158784-bab1-4e87-957c-cce0a7180f6f" containerID="cb3935663bce77023993755d95c01c007a5b46217b9dcbf39d52c4afca178b15" exitCode=0 Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.117051 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4bcjh" event={"ID":"c3158784-bab1-4e87-957c-cce0a7180f6f","Type":"ContainerDied","Data":"cb3935663bce77023993755d95c01c007a5b46217b9dcbf39d52c4afca178b15"} Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.123788 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" event={"ID":"58bd9bee-ec13-4d46-9a0a-2e94629ac059","Type":"ContainerDied","Data":"b61e384e325c149aef43f05673bc58b42d11ae2b53aa4038a9b85bad171f7fe8"} Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.123840 4672 scope.go:117] "RemoveContainer" containerID="f34669468ae74494b5600e4486e7a3db912679745d16d58e1dca44aaf9ca8ed8" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.123892 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64d796cf9-nxb9q" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.149461 4672 scope.go:117] "RemoveContainer" containerID="d6296fcf74afdd02c9a675ee65509d63d56b408346a407011825344d9f879d12" Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.181162 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"] Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.204496 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64d796cf9-nxb9q"] Oct 07 15:07:45 crc kubenswrapper[4672]: I1007 15:07:45.901875 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" path="/var/lib/kubelet/pods/58bd9bee-ec13-4d46-9a0a-2e94629ac059/volumes" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.132594 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qw6h4" event={"ID":"c36d7f75-c0e2-4884-9556-e2578247b813","Type":"ContainerStarted","Data":"923183dbf95f906c6d05b13729d7d3332cb2a0a1035bd95fbf82228580824632"} Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.152114 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qw6h4" podStartSLOduration=2.166625901 podStartE2EDuration="7.15209594s" podCreationTimestamp="2025-10-07 15:07:39 +0000 UTC" firstStartedPulling="2025-10-07 15:07:40.009712692 +0000 UTC m=+1136.984891273" lastFinishedPulling="2025-10-07 15:07:44.995182741 +0000 UTC m=+1141.970361312" observedRunningTime="2025-10-07 15:07:46.151584895 +0000 UTC m=+1143.126763476" watchObservedRunningTime="2025-10-07 15:07:46.15209594 +0000 UTC m=+1143.127274521" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.474614 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.553535 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data\") pod \"c3158784-bab1-4e87-957c-cce0a7180f6f\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.554047 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn47p\" (UniqueName: \"kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p\") pod \"c3158784-bab1-4e87-957c-cce0a7180f6f\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.554101 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data\") pod \"c3158784-bab1-4e87-957c-cce0a7180f6f\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.554168 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle\") pod \"c3158784-bab1-4e87-957c-cce0a7180f6f\" (UID: \"c3158784-bab1-4e87-957c-cce0a7180f6f\") " Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.559126 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c3158784-bab1-4e87-957c-cce0a7180f6f" (UID: "c3158784-bab1-4e87-957c-cce0a7180f6f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.559746 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p" (OuterVolumeSpecName: "kube-api-access-cn47p") pod "c3158784-bab1-4e87-957c-cce0a7180f6f" (UID: "c3158784-bab1-4e87-957c-cce0a7180f6f"). InnerVolumeSpecName "kube-api-access-cn47p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.578001 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3158784-bab1-4e87-957c-cce0a7180f6f" (UID: "c3158784-bab1-4e87-957c-cce0a7180f6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.595789 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data" (OuterVolumeSpecName: "config-data") pod "c3158784-bab1-4e87-957c-cce0a7180f6f" (UID: "c3158784-bab1-4e87-957c-cce0a7180f6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.655939 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.655973 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn47p\" (UniqueName: \"kubernetes.io/projected/c3158784-bab1-4e87-957c-cce0a7180f6f-kube-api-access-cn47p\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.655984 4672 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:46 crc kubenswrapper[4672]: I1007 15:07:46.655994 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3158784-bab1-4e87-957c-cce0a7180f6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.144062 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4bcjh" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.144060 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4bcjh" event={"ID":"c3158784-bab1-4e87-957c-cce0a7180f6f","Type":"ContainerDied","Data":"9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954"} Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.144218 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f0c887bcbb9dafa770b43dfc98a1d4b67cea95a95ac7cbe36119fd6591f2954" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505248 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505571 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4272a5a5-5cd2-4045-a1d7-735f5d8e3479" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505587 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4272a5a5-5cd2-4045-a1d7-735f5d8e3479" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505601 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303b2f6b-3e7b-4f25-b480-e2a74863215a" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505608 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="303b2f6b-3e7b-4f25-b480-e2a74863215a" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505624 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d130321-61d8-41eb-972e-c0a9b7eed5e2" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505632 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d130321-61d8-41eb-972e-c0a9b7eed5e2" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505652 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="dnsmasq-dns" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505659 4672 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="dnsmasq-dns" Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505673 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="init" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505678 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="init" Oct 07 15:07:47 crc kubenswrapper[4672]: E1007 15:07:47.505692 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3158784-bab1-4e87-957c-cce0a7180f6f" containerName="glance-db-sync" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505698 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3158784-bab1-4e87-957c-cce0a7180f6f" containerName="glance-db-sync" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505834 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="303b2f6b-3e7b-4f25-b480-e2a74863215a" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505846 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3158784-bab1-4e87-957c-cce0a7180f6f" containerName="glance-db-sync" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505856 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="58bd9bee-ec13-4d46-9a0a-2e94629ac059" containerName="dnsmasq-dns" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505869 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d130321-61d8-41eb-972e-c0a9b7eed5e2" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.505888 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4272a5a5-5cd2-4045-a1d7-735f5d8e3479" containerName="mariadb-database-create" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.506723 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.521933 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.570977 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdx7r\" (UniqueName: \"kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.571305 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.571333 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.571354 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.571376 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.571524 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.672936 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.672992 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdx7r\" (UniqueName: \"kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673040 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673061 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673079 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673099 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673955 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.673962 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.674553 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.674822 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.675141 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.707710 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdx7r\" (UniqueName: 
\"kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r\") pod \"dnsmasq-dns-5956c4d589-5xpxg\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:47 crc kubenswrapper[4672]: I1007 15:07:47.829650 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:48 crc kubenswrapper[4672]: I1007 15:07:48.284582 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:49 crc kubenswrapper[4672]: I1007 15:07:49.162333 4672 generic.go:334] "Generic (PLEG): container finished" podID="c36d7f75-c0e2-4884-9556-e2578247b813" containerID="923183dbf95f906c6d05b13729d7d3332cb2a0a1035bd95fbf82228580824632" exitCode=0 Oct 07 15:07:49 crc kubenswrapper[4672]: I1007 15:07:49.162414 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qw6h4" event={"ID":"c36d7f75-c0e2-4884-9556-e2578247b813","Type":"ContainerDied","Data":"923183dbf95f906c6d05b13729d7d3332cb2a0a1035bd95fbf82228580824632"} Oct 07 15:07:49 crc kubenswrapper[4672]: I1007 15:07:49.164722 4672 generic.go:334] "Generic (PLEG): container finished" podID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerID="1db14ce484d921203099abc86df34696c9a06cead62001ce78f30fe1628bafb7" exitCode=0 Oct 07 15:07:49 crc kubenswrapper[4672]: I1007 15:07:49.164762 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" event={"ID":"f951f2f3-1aa4-43c8-a436-6dc3639c02e2","Type":"ContainerDied","Data":"1db14ce484d921203099abc86df34696c9a06cead62001ce78f30fe1628bafb7"} Oct 07 15:07:49 crc kubenswrapper[4672]: I1007 15:07:49.164786 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" event={"ID":"f951f2f3-1aa4-43c8-a436-6dc3639c02e2","Type":"ContainerStarted","Data":"6bbfa3f68e225721df5f52ca71d519c36d1b09121a59d9c08753e1596ae0defc"} Oct 07 15:07:50 crc kubenswrapper[4672]: I1007 15:07:50.172842 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" event={"ID":"f951f2f3-1aa4-43c8-a436-6dc3639c02e2","Type":"ContainerStarted","Data":"5f7f4b20d118fbf5f23419fd6d90274d7e106dcf3d75e0c52633743385184dcc"} Oct 07 15:07:50 crc kubenswrapper[4672]: I1007 15:07:50.203266 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" podStartSLOduration=3.203245076 podStartE2EDuration="3.203245076s" podCreationTimestamp="2025-10-07 15:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:50.198207621 +0000 UTC m=+1147.173386202" watchObservedRunningTime="2025-10-07 15:07:50.203245076 +0000 UTC m=+1147.178423657" Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.180028 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.887181 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qw6h4" Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.950798 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data\") pod \"c36d7f75-c0e2-4884-9556-e2578247b813\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.951202 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxdcv\" (UniqueName: \"kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv\") pod \"c36d7f75-c0e2-4884-9556-e2578247b813\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.951352 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle\") pod \"c36d7f75-c0e2-4884-9556-e2578247b813\" (UID: \"c36d7f75-c0e2-4884-9556-e2578247b813\") " Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.960387 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv" (OuterVolumeSpecName: "kube-api-access-qxdcv") pod "c36d7f75-c0e2-4884-9556-e2578247b813" (UID: "c36d7f75-c0e2-4884-9556-e2578247b813"). InnerVolumeSpecName "kube-api-access-qxdcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:51 crc kubenswrapper[4672]: I1007 15:07:51.981182 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c36d7f75-c0e2-4884-9556-e2578247b813" (UID: "c36d7f75-c0e2-4884-9556-e2578247b813"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.004436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data" (OuterVolumeSpecName: "config-data") pod "c36d7f75-c0e2-4884-9556-e2578247b813" (UID: "c36d7f75-c0e2-4884-9556-e2578247b813"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.052700 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.052742 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c36d7f75-c0e2-4884-9556-e2578247b813-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.052752 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxdcv\" (UniqueName: \"kubernetes.io/projected/c36d7f75-c0e2-4884-9556-e2578247b813-kube-api-access-qxdcv\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.190044 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qw6h4" event={"ID":"c36d7f75-c0e2-4884-9556-e2578247b813","Type":"ContainerDied","Data":"f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832"} Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.190097 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f276713c0daec3258083dc92592d2f0e833df99f092dcad6543a8f56b29b7832" Oct 07 15:07:52 crc kubenswrapper[4672]: I1007 15:07:52.190104 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qw6h4" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.136916 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.178975 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:53 crc kubenswrapper[4672]: E1007 15:07:53.179425 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36d7f75-c0e2-4884-9556-e2578247b813" containerName="keystone-db-sync" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.179448 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36d7f75-c0e2-4884-9556-e2578247b813" containerName="keystone-db-sync" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.179696 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36d7f75-c0e2-4884-9556-e2578247b813" containerName="keystone-db-sync" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.180746 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.197830 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="dnsmasq-dns" containerID="cri-o://5f7f4b20d118fbf5f23419fd6d90274d7e106dcf3d75e0c52633743385184dcc" gracePeriod=10 Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.206644 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6q2nt"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.207830 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.217400 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rtlqr" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.217591 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.217756 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.225617 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.229845 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.271799 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.271890 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jsch\" (UniqueName: \"kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.271922 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.271962 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.271991 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272032 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272086 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272108 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272141 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njf9f\" (UniqueName: \"kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272166 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272242 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.272269 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.302065 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6q2nt"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.372140 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.372919 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373055 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373189 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts\") pod 
\"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373280 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373374 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373462 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373557 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njf9f\" (UniqueName: \"kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373649 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373773 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373866 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.373965 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.374107 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jsch\" (UniqueName: \"kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " 
pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.375438 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.376139 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.377070 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.378791 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.379458 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.387302 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.388548 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.388808 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.398686 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.399332 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-7d4pf" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.399675 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.399835 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.407923 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.407983 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.409056 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.409477 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.410113 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njf9f\" (UniqueName: \"kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f\") pod \"keystone-bootstrap-6q2nt\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.426004 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jsch\" (UniqueName: \"kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch\") pod \"dnsmasq-dns-59d4f57bc5-kfvm7\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 
15:07:53.477364 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.477658 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.477935 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk9ll\" (UniqueName: \"kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.478107 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.478200 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.513847 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.545646 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.551100 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.609186 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.631544 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.631614 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.631806 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.631834 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.631917 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk9ll\" (UniqueName: \"kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.638847 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.639628 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.646215 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.646600 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.651506 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " 
pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.671498 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk9ll\" (UniqueName: \"kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll\") pod \"horizon-5fcb5df989-stdqv\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.684688 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.693106 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.712468 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.713162 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.715379 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.773845 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.776009 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.784911 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.785003 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-x7z9z" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.785367 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.785465 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.791350 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.800492 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.810124 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-lzmcv"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.811585 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.821496 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-hb27h" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.821676 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.822706 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.841695 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd59v\" (UniqueName: \"kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.841966 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842080 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842161 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842266 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842345 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842471 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842578 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842649 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: 
I1007 15:07:53.842765 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842874 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cjlb\" (UniqueName: \"kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.842970 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.843216 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lzmcv"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.849573 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.887396 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.896262 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.905787 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.910283 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951067 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951088 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951128 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key\") pod \"horizon-66c969bf9c-mwvhc\" (UID: 
\"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951147 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951165 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951209 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm9db\" (UniqueName: \"kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951230 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951258 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951280 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951300 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951317 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951342 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " 
pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951361 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951382 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951403 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951423 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951440 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951456 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951473 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951495 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8g8b\" (UniqueName: \"kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951515 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cjlb\" (UniqueName: \"kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc 
kubenswrapper[4672]: I1007 15:07:53.951540 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951563 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.951577 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd59v\" (UniqueName: \"kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.953463 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.953863 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.956738 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.957369 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.964217 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.964250 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.966134 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.974908 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.978588 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.984871 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.985246 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.990745 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.995737 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:53 crc kubenswrapper[4672]: I1007 15:07:53.997110 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.001557 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cjlb\" (UniqueName: \"kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb\") pod \"horizon-66c969bf9c-mwvhc\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.019840 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd59v\" (UniqueName: \"kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v\") pod \"ceilometer-0\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " pod="openstack/ceilometer-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.051464 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052723 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052750 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052772 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052793 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052808 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052823 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6ztm\" (UniqueName: \"kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052840 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052876 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052914 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " 
pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052935 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052959 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.052976 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053029 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053052 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8g8b\" (UniqueName: \"kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053077 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053100 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053148 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053168 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") 
" pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053195 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053224 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053241 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053261 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053284 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053304 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053322 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm9db\" (UniqueName: \"kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053340 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8qth\" (UniqueName: \"kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053377 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " 
pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.053616 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.062995 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.065336 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.074852 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.076862 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.077875 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.085767 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.086608 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.086918 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.098010 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.100765 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.126707 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm9db\" (UniqueName: \"kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.132162 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8g8b\" (UniqueName: \"kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.154479 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155193 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155331 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-w6ztm\" (UniqueName: \"kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155437 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155551 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155670 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155807 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155882 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155971 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.156223 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.156315 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.156440 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.156526 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.156635 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8qth\" (UniqueName: \"kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.157931 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.159860 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.165336 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.155615 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.165589 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.165737 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.172615 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0\") pod 
\"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.173478 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.184728 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.185589 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.201276 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.204437 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle\") pod \"placement-db-sync-lzmcv\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.205670 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6ztm\" (UniqueName: \"kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.210779 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8qth\" (UniqueName: \"kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth\") pod \"dnsmasq-dns-794bfdbb9f-mw8qf\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.211482 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.225429 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 
crc kubenswrapper[4672]: I1007 15:07:54.225466 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.234057 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" event={"ID":"f951f2f3-1aa4-43c8-a436-6dc3639c02e2","Type":"ContainerDied","Data":"5f7f4b20d118fbf5f23419fd6d90274d7e106dcf3d75e0c52633743385184dcc"} Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.241306 4672 generic.go:334] "Generic (PLEG): container finished" podID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerID="5f7f4b20d118fbf5f23419fd6d90274d7e106dcf3d75e0c52633743385184dcc" exitCode=0 Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.252764 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.325419 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.416656 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.468763 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lzmcv" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.505443 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6q2nt"] Oct 07 15:07:54 crc kubenswrapper[4672]: W1007 15:07:54.584595 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9bd020e_9e5a_4cef_a33d_e8b254545d46.slice/crio-71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d WatchSource:0}: Error finding container 71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d: Status 404 returned error can't find the container with id 71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.747677 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776252 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776315 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776425 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776520 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776561 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdx7r\" (UniqueName: \"kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.776648 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config\") pod \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\" (UID: \"f951f2f3-1aa4-43c8-a436-6dc3639c02e2\") " Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.804997 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.814982 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.823260 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r" (OuterVolumeSpecName: "kube-api-access-gdx7r") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "kube-api-access-gdx7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.879254 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdx7r\" (UniqueName: \"kubernetes.io/projected/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-kube-api-access-gdx7r\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.903539 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.923567 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.936231 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.936273 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.958205 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config" (OuterVolumeSpecName: "config") pod "f951f2f3-1aa4-43c8-a436-6dc3639c02e2" (UID: "f951f2f3-1aa4-43c8-a436-6dc3639c02e2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.980486 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.980519 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.980530 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.980539 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:54 crc kubenswrapper[4672]: I1007 15:07:54.980547 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f951f2f3-1aa4-43c8-a436-6dc3639c02e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.041858 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.085841 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:07:55 crc kubenswrapper[4672]: W1007 15:07:55.114868 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f15bf66_1b2a_4084_8e99_33ed3d5e8f41.slice/crio-52d245b453b540116553eae84be7ce060fada39be100f48663ed5c8e0b562562 WatchSource:0}: Error finding container 52d245b453b540116553eae84be7ce060fada39be100f48663ed5c8e0b562562: Status 404 returned error can't find the container with id 52d245b453b540116553eae84be7ce060fada39be100f48663ed5c8e0b562562 Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.226106 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:07:55 crc kubenswrapper[4672]: W1007 15:07:55.239830 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabc5aab9_1196_4fac_9d39_d44d60a0a6d5.slice/crio-70ff148092a7ba384122ed9604f6b437f9dd0e2983d9eabcb589685335dfefcc WatchSource:0}: Error finding container 70ff148092a7ba384122ed9604f6b437f9dd0e2983d9eabcb589685335dfefcc: Status 404 returned error can't find the container with id 70ff148092a7ba384122ed9604f6b437f9dd0e2983d9eabcb589685335dfefcc Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.257586 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerStarted","Data":"f5c0c1a9e5e15a611be7ca9779d220eba85615361ea0152b8c0eec1968a285cf"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.261914 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" event={"ID":"f951f2f3-1aa4-43c8-a436-6dc3639c02e2","Type":"ContainerDied","Data":"6bbfa3f68e225721df5f52ca71d519c36d1b09121a59d9c08753e1596ae0defc"} 
Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.261948 4672 scope.go:117] "RemoveContainer" containerID="5f7f4b20d118fbf5f23419fd6d90274d7e106dcf3d75e0c52633743385184dcc" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.262066 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956c4d589-5xpxg" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.265439 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerStarted","Data":"52d245b453b540116553eae84be7ce060fada39be100f48663ed5c8e0b562562"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.266286 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerStarted","Data":"4a2d0764c161b0e6c1a0d5824f3ac115553a36000c2ef3aa5f76c408282def99"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.270306 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6q2nt" event={"ID":"e9bd020e-9e5a-4cef-a33d-e8b254545d46","Type":"ContainerStarted","Data":"58690f3b57da320b8f4efe3215a4333c170fd1ebefa254e9a59e3db1090842ad"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.270342 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6q2nt" event={"ID":"e9bd020e-9e5a-4cef-a33d-e8b254545d46","Type":"ContainerStarted","Data":"71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.275758 4672 generic.go:334] "Generic (PLEG): container finished" podID="2425fa8e-b1c1-4289-8b0b-a624d5489917" containerID="442084b4c4a812c8018c31a59495b3e3480c47faec8ea8ea9afd64b67592962b" exitCode=0 Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.276159 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" event={"ID":"2425fa8e-b1c1-4289-8b0b-a624d5489917","Type":"ContainerDied","Data":"442084b4c4a812c8018c31a59495b3e3480c47faec8ea8ea9afd64b67592962b"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.276206 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" event={"ID":"2425fa8e-b1c1-4289-8b0b-a624d5489917","Type":"ContainerStarted","Data":"48c5e0d1c43954543e66616c2f89364ede68038b41059209384077b6f785fab6"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.278279 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" event={"ID":"abc5aab9-1196-4fac-9d39-d44d60a0a6d5","Type":"ContainerStarted","Data":"70ff148092a7ba384122ed9604f6b437f9dd0e2983d9eabcb589685335dfefcc"} Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.293241 4672 scope.go:117] "RemoveContainer" containerID="1db14ce484d921203099abc86df34696c9a06cead62001ce78f30fe1628bafb7" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.324373 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6q2nt" podStartSLOduration=2.324353791 podStartE2EDuration="2.324353791s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:55.293377291 +0000 UTC m=+1152.268555872" watchObservedRunningTime="2025-10-07 15:07:55.324353791 +0000 UTC m=+1152.299532372" Oct 07 
15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.364192 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:55 crc kubenswrapper[4672]: W1007 15:07:55.372939 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b037c52_40e8_43b9_8e8c_51a19c6f1d8c.slice/crio-97f5ed83f5affd04b27c36f47ffc36f17c1339f6ec07e5a75b90d23177efaada WatchSource:0}: Error finding container 97f5ed83f5affd04b27c36f47ffc36f17c1339f6ec07e5a75b90d23177efaada: Status 404 returned error can't find the container with id 97f5ed83f5affd04b27c36f47ffc36f17c1339f6ec07e5a75b90d23177efaada Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.376311 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5956c4d589-5xpxg"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.390920 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.403547 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lzmcv"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.677014 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.719670 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.744792 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.767138 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:07:55 crc kubenswrapper[4672]: E1007 15:07:55.767508 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="init" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.767521 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="init" Oct 07 15:07:55 crc kubenswrapper[4672]: E1007 15:07:55.767553 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="dnsmasq-dns" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.767559 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="dnsmasq-dns" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.769271 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" containerName="dnsmasq-dns" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.771974 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.783578 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.810625 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.910446 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f951f2f3-1aa4-43c8-a436-6dc3639c02e2" path="/var/lib/kubelet/pods/f951f2f3-1aa4-43c8-a436-6dc3639c02e2/volumes" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.912606 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.921930 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.921988 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpz7x\" (UniqueName: \"kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.922059 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.922098 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:55 crc kubenswrapper[4672]: I1007 15:07:55.922122 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:56 crc kubenswrapper[4672]: I1007 15:07:56.024468 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:56 crc kubenswrapper[4672]: I1007 15:07:56.024547 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:57 crc 
kubenswrapper[4672]: I1007 15:07:56.025479 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.025538 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jsch\" (UniqueName: \"kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.025834 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.025863 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb\") pod \"2425fa8e-b1c1-4289-8b0b-a624d5489917\" (UID: \"2425fa8e-b1c1-4289-8b0b-a624d5489917\") " Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.036619 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.036718 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.036904 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.036996 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpz7x\" (UniqueName: \"kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.037143 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.037772 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs\") pod \"horizon-796c559987-ttpm4\" (UID: 
\"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.038451 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.039517 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.044568 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch" (OuterVolumeSpecName: "kube-api-access-9jsch") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "kube-api-access-9jsch". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.047100 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.054922 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.073458 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpz7x\" (UniqueName: \"kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x\") pod \"horizon-796c559987-ttpm4\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.085407 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.091741 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.102516 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config" (OuterVolumeSpecName: "config") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.116720 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.118008 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2425fa8e-b1c1-4289-8b0b-a624d5489917" (UID: "2425fa8e-b1c1-4289-8b0b-a624d5489917"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142186 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142220 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142236 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142247 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jsch\" (UniqueName: \"kubernetes.io/projected/2425fa8e-b1c1-4289-8b0b-a624d5489917-kube-api-access-9jsch\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142259 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.142302 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2425fa8e-b1c1-4289-8b0b-a624d5489917-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.205904 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.301611 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lzmcv" event={"ID":"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017","Type":"ContainerStarted","Data":"e397c78d10d3edf75ff896d5070aa79bd0bfe32a95cc95743b15528bb2383525"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.305184 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerStarted","Data":"4e7df20ae7ce552b928698f56592d252333dd010434b161dd49687ac90885209"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.309784 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerStarted","Data":"97f5ed83f5affd04b27c36f47ffc36f17c1339f6ec07e5a75b90d23177efaada"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.313612 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.313620 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d4f57bc5-kfvm7" event={"ID":"2425fa8e-b1c1-4289-8b0b-a624d5489917","Type":"ContainerDied","Data":"48c5e0d1c43954543e66616c2f89364ede68038b41059209384077b6f785fab6"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.313685 4672 scope.go:117] "RemoveContainer" containerID="442084b4c4a812c8018c31a59495b3e3480c47faec8ea8ea9afd64b67592962b" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.319452 4672 generic.go:334] "Generic (PLEG): container finished" podID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerID="077be3ff27cc1902958dfaad595be5a0207e0b1a43ceb70620b89cfc79b02133" exitCode=0 Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.320180 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" event={"ID":"abc5aab9-1196-4fac-9d39-d44d60a0a6d5","Type":"ContainerDied","Data":"077be3ff27cc1902958dfaad595be5a0207e0b1a43ceb70620b89cfc79b02133"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.650405 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.650691 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.650729 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.651343 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.651327 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.651386 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced" gracePeriod=600 Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:56.657081 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d4f57bc5-kfvm7"] Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.347450 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced" exitCode=0 Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.347546 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.347820 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.347841 4672 scope.go:117] "RemoveContainer" containerID="be2c87c8e05952dbb0cedcf0cbcc2ad775db0f756c18accfa273872c875b7f56" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.351056 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerStarted","Data":"9fc111aa4e40fc013a83bf9bd3f1d3749f8836f2c72e002516bd279aa3455901"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.360883 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerStarted","Data":"64e36769a068d84e7a1559fa74e74e014f9abee15048b52c290a66e58d1250be"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.383185 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" event={"ID":"abc5aab9-1196-4fac-9d39-d44d60a0a6d5","Type":"ContainerStarted","Data":"a0742bb6c3bc1235f49809fe17051ff39debfb3c0e4d0e89cf3af01f7da9ebf6"} Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.384364 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.387290 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.409559 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" podStartSLOduration=4.409534021 podStartE2EDuration="4.409534021s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:57.404714673 +0000 UTC m=+1154.379893254" watchObservedRunningTime="2025-10-07 15:07:57.409534021 +0000 UTC m=+1154.384712602" Oct 07 15:07:57 crc kubenswrapper[4672]: W1007 15:07:57.413594 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7aa0b1b9_35ce_47c6_859d_86fdf4138b2d.slice/crio-9aeba4a70e77476ab36dba5e5b8db11a88b16580adf38bb57d70f43e7cca5b40 WatchSource:0}: Error finding container 9aeba4a70e77476ab36dba5e5b8db11a88b16580adf38bb57d70f43e7cca5b40: Status 404 returned error can't find the container with id 9aeba4a70e77476ab36dba5e5b8db11a88b16580adf38bb57d70f43e7cca5b40 Oct 07 15:07:57 crc kubenswrapper[4672]: I1007 15:07:57.915247 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2425fa8e-b1c1-4289-8b0b-a624d5489917" path="/var/lib/kubelet/pods/2425fa8e-b1c1-4289-8b0b-a624d5489917/volumes" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.403086 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerStarted","Data":"d621dfb525bdbd2a49be893cb65811d2719808820a8a02667192b48d7cfb9f7c"} Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.403138 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-log" containerID="cri-o://64e36769a068d84e7a1559fa74e74e014f9abee15048b52c290a66e58d1250be" gracePeriod=30 Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.403153 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-httpd" containerID="cri-o://d621dfb525bdbd2a49be893cb65811d2719808820a8a02667192b48d7cfb9f7c" gracePeriod=30 Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.429002 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerStarted","Data":"9aeba4a70e77476ab36dba5e5b8db11a88b16580adf38bb57d70f43e7cca5b40"} Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.431745 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerStarted","Data":"9cbdc603e8087eb3e993909a889917f340547020da596b7f594c81aa57fdd781"} Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.431952 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-log" containerID="cri-o://9fc111aa4e40fc013a83bf9bd3f1d3749f8836f2c72e002516bd279aa3455901" gracePeriod=30 Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.431972 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-httpd" containerID="cri-o://9cbdc603e8087eb3e993909a889917f340547020da596b7f594c81aa57fdd781" gracePeriod=30 Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.472062 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.472041708 podStartE2EDuration="5.472041708s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:58.467667823 +0000 UTC m=+1155.442846414" watchObservedRunningTime="2025-10-07 15:07:58.472041708 +0000 UTC m=+1155.447220309" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.475804 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.475784776 podStartE2EDuration="5.475784776s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:07:58.443707965 +0000 UTC m=+1155.418886546" watchObservedRunningTime="2025-10-07 15:07:58.475784776 +0000 UTC m=+1155.450963357" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.924215 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8124-account-create-z59nd"] Oct 07 15:07:58 crc kubenswrapper[4672]: E1007 15:07:58.924892 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2425fa8e-b1c1-4289-8b0b-a624d5489917" containerName="init" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.924918 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2425fa8e-b1c1-4289-8b0b-a624d5489917" containerName="init" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.925196 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2425fa8e-b1c1-4289-8b0b-a624d5489917" containerName="init" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.930612 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.934850 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8124-account-create-z59nd"] Oct 07 15:07:58 crc kubenswrapper[4672]: I1007 15:07:58.937156 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.037113 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmnp9\" (UniqueName: \"kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9\") pod \"barbican-8124-account-create-z59nd\" (UID: \"8cffe4dc-e78b-4b0e-a3cc-093777856e62\") " pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.110292 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5189-account-create-d5h2g"] Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.114287 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.118871 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.124696 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5189-account-create-d5h2g"] Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.138795 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmnp9\" (UniqueName: \"kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9\") pod \"barbican-8124-account-create-z59nd\" (UID: \"8cffe4dc-e78b-4b0e-a3cc-093777856e62\") " pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.162490 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmnp9\" (UniqueName: \"kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9\") pod \"barbican-8124-account-create-z59nd\" (UID: \"8cffe4dc-e78b-4b0e-a3cc-093777856e62\") " pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.240908 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bfsn\" (UniqueName: \"kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn\") pod \"cinder-5189-account-create-d5h2g\" (UID: \"3bab44d9-a425-4213-ba3d-58d6a976da25\") " pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.269338 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.325541 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-913d-account-create-62bjt"] Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.326826 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.328968 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.341069 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-913d-account-create-62bjt"] Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.342730 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bfsn\" (UniqueName: \"kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn\") pod \"cinder-5189-account-create-d5h2g\" (UID: \"3bab44d9-a425-4213-ba3d-58d6a976da25\") " pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.360529 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bfsn\" (UniqueName: \"kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn\") pod \"cinder-5189-account-create-d5h2g\" (UID: \"3bab44d9-a425-4213-ba3d-58d6a976da25\") " pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.442232 4672 generic.go:334] "Generic (PLEG): container finished" podID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerID="d621dfb525bdbd2a49be893cb65811d2719808820a8a02667192b48d7cfb9f7c" exitCode=0 Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.442638 4672 generic.go:334] "Generic (PLEG): container finished" podID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerID="64e36769a068d84e7a1559fa74e74e014f9abee15048b52c290a66e58d1250be" exitCode=143 Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.442306 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerDied","Data":"d621dfb525bdbd2a49be893cb65811d2719808820a8a02667192b48d7cfb9f7c"} Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.442687 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerDied","Data":"64e36769a068d84e7a1559fa74e74e014f9abee15048b52c290a66e58d1250be"} Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.444479 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkl5h\" (UniqueName: \"kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h\") pod \"neutron-913d-account-create-62bjt\" (UID: \"ce3ab335-518f-4c6e-8651-894af34abdd7\") " pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.444850 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.445497 4672 generic.go:334] "Generic (PLEG): container finished" podID="f9e59740-c3a1-48d6-847a-853862002d42" containerID="9cbdc603e8087eb3e993909a889917f340547020da596b7f594c81aa57fdd781" exitCode=0 Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.445520 4672 generic.go:334] "Generic (PLEG): container finished" podID="f9e59740-c3a1-48d6-847a-853862002d42" containerID="9fc111aa4e40fc013a83bf9bd3f1d3749f8836f2c72e002516bd279aa3455901" exitCode=143 Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.445577 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerDied","Data":"9cbdc603e8087eb3e993909a889917f340547020da596b7f594c81aa57fdd781"} Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.445608 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerDied","Data":"9fc111aa4e40fc013a83bf9bd3f1d3749f8836f2c72e002516bd279aa3455901"} Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.447967 4672 generic.go:334] "Generic (PLEG): container finished" podID="e9bd020e-9e5a-4cef-a33d-e8b254545d46" containerID="58690f3b57da320b8f4efe3215a4333c170fd1ebefa254e9a59e3db1090842ad" exitCode=0 Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.448090 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6q2nt" event={"ID":"e9bd020e-9e5a-4cef-a33d-e8b254545d46","Type":"ContainerDied","Data":"58690f3b57da320b8f4efe3215a4333c170fd1ebefa254e9a59e3db1090842ad"} Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.546305 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkl5h\" (UniqueName: \"kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h\") pod \"neutron-913d-account-create-62bjt\" (UID: \"ce3ab335-518f-4c6e-8651-894af34abdd7\") " pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.572057 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkl5h\" (UniqueName: \"kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h\") pod \"neutron-913d-account-create-62bjt\" (UID: \"ce3ab335-518f-4c6e-8651-894af34abdd7\") " pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:07:59 crc kubenswrapper[4672]: I1007 15:07:59.656393 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.167940 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.217298 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.219790 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.228644 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.229029 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.254427 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.286674 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-694678548d-bbtxt"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.288289 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293291 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293320 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293345 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293376 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293409 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlgzs\" (UniqueName: \"kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293445 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.293481 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key\") pod 
\"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.306835 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-694678548d-bbtxt"] Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395081 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c1e726-5fce-4f95-952f-effb9a8993f3-logs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395136 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395169 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-combined-ca-bundle\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395205 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395224 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395244 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/d3c1e726-5fce-4f95-952f-effb9a8993f3-kube-api-access-9k7kh\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395261 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-tls-certs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395280 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395311 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395329 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-secret-key\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395360 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlgzs\" (UniqueName: \"kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395382 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-scripts\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395405 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-config-data\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.395439 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.397007 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.397102 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.397923 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.400742 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: 
\"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.401881 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.402128 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.414967 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlgzs\" (UniqueName: \"kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs\") pod \"horizon-bbdf8cc6b-btjlc\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497087 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-combined-ca-bundle\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497162 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/d3c1e726-5fce-4f95-952f-effb9a8993f3-kube-api-access-9k7kh\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497180 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-tls-certs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497220 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-secret-key\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497258 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-scripts\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.497280 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-config-data\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: 
I1007 15:08:02.497328 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c1e726-5fce-4f95-952f-effb9a8993f3-logs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.501509 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-tls-certs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.501635 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-combined-ca-bundle\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.503174 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c1e726-5fce-4f95-952f-effb9a8993f3-logs\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.504567 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-scripts\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.509091 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c1e726-5fce-4f95-952f-effb9a8993f3-config-data\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.524562 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3c1e726-5fce-4f95-952f-effb9a8993f3-horizon-secret-key\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.528667 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/d3c1e726-5fce-4f95-952f-effb9a8993f3-kube-api-access-9k7kh\") pod \"horizon-694678548d-bbtxt\" (UID: \"d3c1e726-5fce-4f95-952f-effb9a8993f3\") " pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.549449 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:02 crc kubenswrapper[4672]: I1007 15:08:02.626896 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:04 crc kubenswrapper[4672]: I1007 15:08:04.330877 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:08:04 crc kubenswrapper[4672]: I1007 15:08:04.379873 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:08:04 crc kubenswrapper[4672]: I1007 15:08:04.380210 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5988746689-84wxb" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="dnsmasq-dns" containerID="cri-o://722775be4f4cf597f13cd7fe6551e06a16fb3d5f711ca723258e94584b9d39ee" gracePeriod=10 Oct 07 15:08:05 crc kubenswrapper[4672]: I1007 15:08:05.520556 4672 generic.go:334] "Generic (PLEG): container finished" podID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerID="722775be4f4cf597f13cd7fe6551e06a16fb3d5f711ca723258e94584b9d39ee" exitCode=0 Oct 07 15:08:05 crc kubenswrapper[4672]: I1007 15:08:05.520801 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5988746689-84wxb" event={"ID":"de394e3e-48ba-4410-8fb2-a99e03f27ac9","Type":"ContainerDied","Data":"722775be4f4cf597f13cd7fe6551e06a16fb3d5f711ca723258e94584b9d39ee"} Oct 07 15:08:07 crc kubenswrapper[4672]: I1007 15:08:07.603452 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5988746689-84wxb" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.097960 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-placement-api:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.098334 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-placement-api:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.098558 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-placement-api:b78cfc68a577b1553523c8a70a34e297,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8g8b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-lzmcv_openstack(8c3d2245-20b7-4ef1-a2ba-9548d4c5d017): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.099733 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-lzmcv" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.547997 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.103:5001/podified-antelope-centos9/openstack-placement-api:b78cfc68a577b1553523c8a70a34e297\\\"\"" pod="openstack/placement-db-sync-lzmcv" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.870078 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-ceilometer-central:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:08 crc kubenswrapper[4672]: E1007 15:08:08.870382 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-ceilometer-central:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:08 crc 
kubenswrapper[4672]: E1007 15:08:08.871344 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-ceilometer-central:b78cfc68a577b1553523c8a70a34e297,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n9dhf9h55dh556hf5h7bh54bh686h67bh55bh94h5c4h9fh6dh565hb8h694hc8h55dh595h5bfh548h649h58hc5h549hfdh687hddhf7h55fhf4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rd59v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(7f15bf66-1b2a-4084-8e99-33ed3d5e8f41): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 15:08:08 crc kubenswrapper[4672]: I1007 15:08:08.996079 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6q2nt" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.145643 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.161785 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.161860 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.161998 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.162058 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njf9f\" (UniqueName: \"kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.162078 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.162148 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts\") pod \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\" (UID: \"e9bd020e-9e5a-4cef-a33d-e8b254545d46\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.168342 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.169547 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts" (OuterVolumeSpecName: "scripts") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.171304 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f" (OuterVolumeSpecName: "kube-api-access-njf9f") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "kube-api-access-njf9f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.182205 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.204264 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.210669 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data" (OuterVolumeSpecName: "config-data") pod "e9bd020e-9e5a-4cef-a33d-e8b254545d46" (UID: "e9bd020e-9e5a-4cef-a33d-e8b254545d46"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263642 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263733 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263831 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263866 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263889 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.263963 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm9db\" (UniqueName: \"kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.264068 4672 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.264109 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs\") pod \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\" (UID: \"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.264994 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs" (OuterVolumeSpecName: "logs") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267607 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267644 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267656 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njf9f\" (UniqueName: \"kubernetes.io/projected/e9bd020e-9e5a-4cef-a33d-e8b254545d46-kube-api-access-njf9f\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267668 4672 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267678 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267689 4672 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.267712 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9bd020e-9e5a-4cef-a33d-e8b254545d46-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.268435 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.278172 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts" (OuterVolumeSpecName: "scripts") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.279365 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.290544 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.293262 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db" (OuterVolumeSpecName: "kube-api-access-pm9db") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "kube-api-access-pm9db". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.367137 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data" (OuterVolumeSpecName: "config-data") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.367179 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368371 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368436 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368513 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368561 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368602 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368621 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368676 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.368727 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6ztm\" (UniqueName: \"kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm\") pod \"f9e59740-c3a1-48d6-847a-853862002d42\" (UID: \"f9e59740-c3a1-48d6-847a-853862002d42\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369370 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369388 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369399 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369427 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369440 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm9db\" (UniqueName: \"kubernetes.io/projected/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-kube-api-access-pm9db\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.369453 4672 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.373347 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.374174 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.378898 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs" (OuterVolumeSpecName: "logs") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.459542 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.459933 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm" (OuterVolumeSpecName: "kube-api-access-w6ztm") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "kube-api-access-w6ztm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.459957 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts" (OuterVolumeSpecName: "scripts") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471593 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471674 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471712 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471845 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471872 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.471961 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j97qb\" (UniqueName: \"kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb\") pod \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\" (UID: \"de394e3e-48ba-4410-8fb2-a99e03f27ac9\") " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.476314 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8124-account-create-z59nd"] Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.479855 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.479934 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.479951 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6ztm\" (UniqueName: \"kubernetes.io/projected/f9e59740-c3a1-48d6-847a-853862002d42-kube-api-access-w6ztm\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.479964 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.479975 4672 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/f9e59740-c3a1-48d6-847a-853862002d42-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.496609 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.510953 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb" (OuterVolumeSpecName: "kube-api-access-j97qb") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "kube-api-access-j97qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.522543 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.554761 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b037c52-40e8-43b9-8e8c-51a19c6f1d8c","Type":"ContainerDied","Data":"97f5ed83f5affd04b27c36f47ffc36f17c1339f6ec07e5a75b90d23177efaada"} Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.554812 4672 scope.go:117] "RemoveContainer" containerID="d621dfb525bdbd2a49be893cb65811d2719808820a8a02667192b48d7cfb9f7c" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.554940 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.558314 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.558331 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5988746689-84wxb" event={"ID":"de394e3e-48ba-4410-8fb2-a99e03f27ac9","Type":"ContainerDied","Data":"ff5d04f4db83136ce8b5d73fdba4fa57015b50dd3b30279a9e2065eac657d1b8"} Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.561182 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8124-account-create-z59nd" event={"ID":"8cffe4dc-e78b-4b0e-a3cc-093777856e62","Type":"ContainerStarted","Data":"7f5c834bb2181c7d2b2f27cdfc8b06233e4e2facc4a7ca3a408ad8c6a991fd11"} Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.566178 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9e59740-c3a1-48d6-847a-853862002d42","Type":"ContainerDied","Data":"4e7df20ae7ce552b928698f56592d252333dd010434b161dd49687ac90885209"} Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.566192 4672 util.go:48] "No ready sandbox for pod can be found. 
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.569088 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6q2nt" event={"ID":"e9bd020e-9e5a-4cef-a33d-e8b254545d46","Type":"ContainerDied","Data":"71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d"}
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.569127 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71e8210bd5667aa6fde03c6994996a2888782ec18ac067ef77662b09b2bacf9d"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.569232 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6q2nt"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.577217 4672 scope.go:117] "RemoveContainer" containerID="64e36769a068d84e7a1559fa74e74e014f9abee15048b52c290a66e58d1250be"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.582342 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.582370 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j97qb\" (UniqueName: \"kubernetes.io/projected/de394e3e-48ba-4410-8fb2-a99e03f27ac9-kube-api-access-j97qb\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.582383 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.609139 4672 scope.go:117] "RemoveContainer" containerID="722775be4f4cf597f13cd7fe6551e06a16fb3d5f711ca723258e94584b9d39ee"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.648656 4672 scope.go:117] "RemoveContainer" containerID="f3c277ade9b0539e7eda32fb469869489269533924912e570901900e27e6c93d"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.722141 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.722680 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" (UID: "4b037c52-40e8-43b9-8e8c-51a19c6f1d8c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.734103 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5189-account-create-d5h2g"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.734686 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.743469 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.745800 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-913d-account-create-62bjt"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.764192 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data" (OuterVolumeSpecName: "config-data") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.785793 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.785845 4672 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.785859 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.785869 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.787834 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config" (OuterVolumeSpecName: "config") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.787931 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f9e59740-c3a1-48d6-847a-853862002d42" (UID: "f9e59740-c3a1-48d6-847a-853862002d42"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.790293 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.796459 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.806116 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "de394e3e-48ba-4410-8fb2-a99e03f27ac9" (UID: "de394e3e-48ba-4410-8fb2-a99e03f27ac9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.888034 4672 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9e59740-c3a1-48d6-847a-853862002d42-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.888066 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.888075 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.888086 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-config\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.888095 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de394e3e-48ba-4410-8fb2-a99e03f27ac9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.907151 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-694678548d-bbtxt"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.908265 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.917728 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.930470 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931171 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="dnsmasq-dns"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931189 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="dnsmasq-dns"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931203 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bd020e-9e5a-4cef-a33d-e8b254545d46" containerName="keystone-bootstrap"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931209 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bd020e-9e5a-4cef-a33d-e8b254545d46" containerName="keystone-bootstrap"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931220 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931226 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931237 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="init"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931301 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="init"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931316 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931323 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931346 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931353 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: E1007 15:08:09.931369 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931377 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931567 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931583 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931595 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9bd020e-9e5a-4cef-a33d-e8b254545d46" containerName="keystone-bootstrap"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931608 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-log"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931616 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" containerName="dnsmasq-dns"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.931649 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9e59740-c3a1-48d6-847a-853862002d42" containerName="glance-httpd"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.932520 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
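This block captures a pod being replaced rather than restarted: glance-default-external-api-0 is removed ("SyncLoop DELETE" then "SyncLoop REMOVE") and immediately re-added ("SyncLoop ADD") under a new UID, after which cpu_manager and memory_manager purge the per-container state still held for the old UIDs (the paired E/I "RemoveStaleState" and "Deleted CPUSet assignment" lines). One way to spot such recreations is to track the distinct UIDs a pod name appears with in the PLEG event payloads; a sketch under the assumption that the lines look exactly as above:

import re
import sys
from collections import defaultdict

# PLEG event records carry both the pod name and the pod UID.
EVENT = re.compile(r'pod="([^"]+)" event={"ID":"([0-9a-f-]+)"')

def uids_per_pod(lines):
    """Map pod name -> ordered distinct UIDs; more than one means recreation."""
    seen = defaultdict(list)
    for line in lines:
        if m := EVENT.search(line):
            pod, uid = m.groups()
            if uid not in seen[pod]:
                seen[pod].append(uid)
    return seen

if __name__ == "__main__":
    for pod, uids in uids_per_pod(sys.stdin).items():
        if len(uids) > 1:
            print(f"{pod} was recreated: {' -> '.join(uids)}")

On this capture, glance-default-external-api-0 would show the old UID 4b037c52-... followed by the new ed2a01b2-....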
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.940776 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.941162 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-x7z9z"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.941431 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.941615 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Oct 07 15:08:09 crc kubenswrapper[4672]: I1007 15:08:09.965171 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.027064 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.033969 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.066994 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.068472 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.072733 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.072782 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.078459 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091312 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091407 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdzh2\" (UniqueName: \"kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091446 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091493 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091541 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091565 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091588 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.091622 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.156259 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-6q2nt"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.168789 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-6q2nt"]
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.193818 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.193898 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.193931 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.193983 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194188 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194257 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194304 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4zhj\" (UniqueName: \"kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194346 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194375 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194449 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194499 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194532 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194632 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194657 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194722 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdzh2\" (UniqueName: \"kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194732 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194801 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.194850 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.195744 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.200857 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.202810 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.210892 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0"
(UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.213170 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.217412 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdzh2\" (UniqueName: \"kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.238374 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " pod="openstack/glance-default-external-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.250122 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lsj7z"] Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.251492 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lsj7z" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.254840 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.255087 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.255426 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.255556 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rtlqr" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.258582 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lsj7z"] Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.296911 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.297321 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.297337 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc 
kubenswrapper[4672]: I1007 15:08:10.297935 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298008 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298062 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298138 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298198 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4zhj\" (UniqueName: \"kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298264 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.298312 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.299277 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.306624 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.307937 4672 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.310442 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: W1007 15:08:10.311627 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fdc29d3_c934_4da2_870a_9ee2fd82b5e4.slice/crio-50392358c71a51f4782ac87ca03d782339d6aa19aedb651da24f09e7e2604e72 WatchSource:0}: Error finding container 50392358c71a51f4782ac87ca03d782339d6aa19aedb651da24f09e7e2604e72: Status 404 returned error can't find the container with id 50392358c71a51f4782ac87ca03d782339d6aa19aedb651da24f09e7e2604e72 Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.315967 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.316411 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.329112 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4zhj\" (UniqueName: \"kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.350973 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.394578 4672 util.go:30] "No sandbox for pod can be found. 
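The W1007 manager.go:1169 warning in this block comes from cAdvisor's cgroup watcher: it receives a watch event for a freshly created crio-<id> cgroup, but by the time it looks the container up the runtime already reports it gone, so the lookup returns a 404. During a burst of pod recreation like this one, such warnings are usually transient noise rather than a fault. A small counter for them, assuming the crio-<64 hex chars> cgroup naming visible in the log (illustrative script, not kubelet tooling):

import re
import sys
from collections import Counter

# cAdvisor names CRI-O container cgroups "crio-<64 hex chars>"; the watcher
# logs a 404 when the container vanished before it could be inspected.
WATCH_ERR = re.compile(r'Failed to process watch event .*?crio-([0-9a-f]{64})')

def watch_error_counts(lines):
    """Count cAdvisor watch failures per (abbreviated) container ID."""
    return Counter(m.group(1)[:12] for line in lines if (m := WATCH_ERR.search(line)))

if __name__ == "__main__":
    for cid, n in watch_error_counts(sys.stdin).most_common():
        print(f"{cid}: {n} watch error(s)")

A container ID that racks up repeated watch errors over a long window, rather than one or two during startup, would be worth investigating.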
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.399786 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.399865 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.399978 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.400061 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.400090 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.400127 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcd9p\" (UniqueName: \"kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.401631 4672 scope.go:117] "RemoveContainer" containerID="9cbdc603e8087eb3e993909a889917f340547020da596b7f594c81aa57fdd781"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.501833 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.501884 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.501935 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.501969 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.501985 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.502014 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcd9p\" (UniqueName: \"kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.508395 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.511825 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.513514 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.513734 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.514007 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.523227 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcd9p\" (UniqueName: \"kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p\") pod \"keystone-bootstrap-lsj7z\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.548204 4672 scope.go:117] "RemoveContainer" containerID="9fc111aa4e40fc013a83bf9bd3f1d3749f8836f2c72e002516bd279aa3455901"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.602702 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-694678548d-bbtxt" event={"ID":"d3c1e726-5fce-4f95-952f-effb9a8993f3","Type":"ContainerStarted","Data":"3fa9562ca0f95622bff0c78f10be5d67378bcb716613977520456dfd5518417c"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.610682 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-913d-account-create-62bjt" event={"ID":"ce3ab335-518f-4c6e-8651-894af34abdd7","Type":"ContainerStarted","Data":"8a5f5c02171c66fc4050efd478fe5a7e543734a02d05d02c6c77c6d355a5cbed"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.620823 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5189-account-create-d5h2g" event={"ID":"3bab44d9-a425-4213-ba3d-58d6a976da25","Type":"ContainerStarted","Data":"ea3160597ecb863992cb42b4720dbe9c741dfcd94048eb34d4cc176ee09a1d89"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.628436 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerStarted","Data":"8e8584976358a6e50ddc0fc90a3a450f81a6e29d6a48d729a0aa8754312b31d8"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.630080 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8124-account-create-z59nd" event={"ID":"8cffe4dc-e78b-4b0e-a3cc-093777856e62","Type":"ContainerStarted","Data":"75f2292253e63b298e3f1708d7a9faa1ac3efd45a491d9cbd4d1ea30b7a48b85"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.634118 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerStarted","Data":"b69fa2adc76165439524c5edcd2cdf714c8f2f6e20caf17158f4b5141adfa2b5"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.634166 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerStarted","Data":"e2ffa69194e8cdaf98a5bc6c34739cd71dbd67aa5ccab0b47cd75822d7680dec"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.634388 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-796c559987-ttpm4" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon-log" containerID="cri-o://e2ffa69194e8cdaf98a5bc6c34739cd71dbd67aa5ccab0b47cd75822d7680dec" gracePeriod=30
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.634630 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-796c559987-ttpm4" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon" containerID="cri-o://b69fa2adc76165439524c5edcd2cdf714c8f2f6e20caf17158f4b5141adfa2b5" gracePeriod=30
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.640450 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerStarted","Data":"17ad6da9eb58157ef7daa0f3c7349469bee4d16cfa20b1fd2203dd5c3e39c216"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.648153 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerStarted","Data":"50392358c71a51f4782ac87ca03d782339d6aa19aedb651da24f09e7e2604e72"}
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.678962 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-796c559987-ttpm4" podStartSLOduration=4.241708184 podStartE2EDuration="15.678938169s" podCreationTimestamp="2025-10-07 15:07:55 +0000 UTC" firstStartedPulling="2025-10-07 15:07:57.423966766 +0000 UTC m=+1154.399145347" lastFinishedPulling="2025-10-07 15:08:08.861196751 +0000 UTC m=+1165.836375332" observedRunningTime="2025-10-07 15:08:10.672113073 +0000 UTC m=+1167.647291674" watchObservedRunningTime="2025-10-07 15:08:10.678938169 +0000 UTC m=+1167.654116760"
Oct 07 15:08:10 crc kubenswrapper[4672]: I1007 15:08:10.721454 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lsj7z"
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.138436 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 15:08:11 crc kubenswrapper[4672]: W1007 15:08:11.166263 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded2a01b2_d44a_48b1_8463_84d5d873b9a7.slice/crio-baad17181cc3ba92b3113292b3bcac1b0a8d9e04b9fa019c358ca3c7b7b999bb WatchSource:0}: Error finding container baad17181cc3ba92b3113292b3bcac1b0a8d9e04b9fa019c358ca3c7b7b999bb: Status 404 returned error can't find the container with id baad17181cc3ba92b3113292b3bcac1b0a8d9e04b9fa019c358ca3c7b7b999bb
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.294423 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.400912 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lsj7z"]
Oct 07 15:08:11 crc kubenswrapper[4672]: W1007 15:08:11.414874 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode32e5c0f_b0e2_46fa_8672_08a4b6bc9e2b.slice/crio-f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b WatchSource:0}: Error finding container f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b: Status 404 returned error can't find the container with id f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.679087 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerStarted","Data":"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc"}
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.679406 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerStarted","Data":"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd"}
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.682104 4672 generic.go:334] "Generic (PLEG): container finished" podID="8cffe4dc-e78b-4b0e-a3cc-093777856e62" containerID="75f2292253e63b298e3f1708d7a9faa1ac3efd45a491d9cbd4d1ea30b7a48b85" exitCode=0
Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.682168 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8124-account-create-z59nd" event={"ID":"8cffe4dc-e78b-4b0e-a3cc-093777856e62","Type":"ContainerDied","Data":"75f2292253e63b298e3f1708d7a9faa1ac3efd45a491d9cbd4d1ea30b7a48b85"}
event for pod" pod="openstack/barbican-8124-account-create-z59nd" event={"ID":"8cffe4dc-e78b-4b0e-a3cc-093777856e62","Type":"ContainerDied","Data":"75f2292253e63b298e3f1708d7a9faa1ac3efd45a491d9cbd4d1ea30b7a48b85"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.687231 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerStarted","Data":"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.692454 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerStarted","Data":"a0f8e90e22fa289c6355eeebf4e277ecc3be56cdcc7c15e3d0b962fdc6de8090"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.692613 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-66c969bf9c-mwvhc" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon-log" containerID="cri-o://17ad6da9eb58157ef7daa0f3c7349469bee4d16cfa20b1fd2203dd5c3e39c216" gracePeriod=30 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.692874 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-66c969bf9c-mwvhc" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon" containerID="cri-o://a0f8e90e22fa289c6355eeebf4e277ecc3be56cdcc7c15e3d0b962fdc6de8090" gracePeriod=30 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.701083 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-bbdf8cc6b-btjlc" podStartSLOduration=9.701068096 podStartE2EDuration="9.701068096s" podCreationTimestamp="2025-10-07 15:08:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:11.69597201 +0000 UTC m=+1168.671150591" watchObservedRunningTime="2025-10-07 15:08:11.701068096 +0000 UTC m=+1168.676246677" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.707433 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerStarted","Data":"65df83a2965111f14c864ad9a4e9a0975fc24e5f02678948c1319baf860d36cd"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.715481 4672 generic.go:334] "Generic (PLEG): container finished" podID="ce3ab335-518f-4c6e-8651-894af34abdd7" containerID="0b6b606be159db280bbd37b34ac9a9fda500280c4b1c629158985a516035c5de" exitCode=0 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.715550 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-913d-account-create-62bjt" event={"ID":"ce3ab335-518f-4c6e-8651-894af34abdd7","Type":"ContainerDied","Data":"0b6b606be159db280bbd37b34ac9a9fda500280c4b1c629158985a516035c5de"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.718649 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-66c969bf9c-mwvhc" podStartSLOduration=4.683901342 podStartE2EDuration="18.718636291s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="2025-10-07 15:07:55.056748866 +0000 UTC m=+1152.031927447" lastFinishedPulling="2025-10-07 15:08:09.091483815 +0000 UTC m=+1166.066662396" observedRunningTime="2025-10-07 15:08:11.716047297 +0000 UTC m=+1168.691225868" 
watchObservedRunningTime="2025-10-07 15:08:11.718636291 +0000 UTC m=+1168.693814872" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.720119 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-694678548d-bbtxt" event={"ID":"d3c1e726-5fce-4f95-952f-effb9a8993f3","Type":"ContainerStarted","Data":"caf7da00a62e78b31cf3e16198f109cb7b43b742365a5730a1cb8dc4a0e6b0ea"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.720173 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-694678548d-bbtxt" event={"ID":"d3c1e726-5fce-4f95-952f-effb9a8993f3","Type":"ContainerStarted","Data":"6f9ff77fae87641f51d9dc5b6bd5a188f0c368dd39fa42b6d40ef90bda40dad7"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.726137 4672 generic.go:334] "Generic (PLEG): container finished" podID="3bab44d9-a425-4213-ba3d-58d6a976da25" containerID="8bafe4d85674d79b133b904440584276e23ffe5fdd37fe4308b14043556e5956" exitCode=0 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.726212 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5189-account-create-d5h2g" event={"ID":"3bab44d9-a425-4213-ba3d-58d6a976da25","Type":"ContainerDied","Data":"8bafe4d85674d79b133b904440584276e23ffe5fdd37fe4308b14043556e5956"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.732624 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lsj7z" event={"ID":"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b","Type":"ContainerStarted","Data":"bd18ea38c2e6d705d39b63cbf64b2d7043480f4e99d32e92a6091f4966d691e2"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.732664 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lsj7z" event={"ID":"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b","Type":"ContainerStarted","Data":"f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.735884 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerStarted","Data":"baad17181cc3ba92b3113292b3bcac1b0a8d9e04b9fa019c358ca3c7b7b999bb"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.750471 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerStarted","Data":"50cc0512e53bcab239f8340589616507cfe294f794a7ce10fd5eb9a7553d3527"} Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.750629 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5fcb5df989-stdqv" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon-log" containerID="cri-o://8e8584976358a6e50ddc0fc90a3a450f81a6e29d6a48d729a0aa8754312b31d8" gracePeriod=30 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.750720 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5fcb5df989-stdqv" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon" containerID="cri-o://50cc0512e53bcab239f8340589616507cfe294f794a7ce10fd5eb9a7553d3527" gracePeriod=30 Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.766904 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-694678548d-bbtxt" podStartSLOduration=9.766887337 podStartE2EDuration="9.766887337s" podCreationTimestamp="2025-10-07 15:08:02 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:11.760490073 +0000 UTC m=+1168.735668654" watchObservedRunningTime="2025-10-07 15:08:11.766887337 +0000 UTC m=+1168.742065918" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.796697 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lsj7z" podStartSLOduration=1.796674262 podStartE2EDuration="1.796674262s" podCreationTimestamp="2025-10-07 15:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:11.792907424 +0000 UTC m=+1168.768086035" watchObservedRunningTime="2025-10-07 15:08:11.796674262 +0000 UTC m=+1168.771852843" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.813057 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5fcb5df989-stdqv" podStartSLOduration=4.687072852 podStartE2EDuration="18.813007251s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="2025-10-07 15:07:54.835153551 +0000 UTC m=+1151.810332132" lastFinishedPulling="2025-10-07 15:08:08.96108795 +0000 UTC m=+1165.936266531" observedRunningTime="2025-10-07 15:08:11.812994761 +0000 UTC m=+1168.788173352" watchObservedRunningTime="2025-10-07 15:08:11.813007251 +0000 UTC m=+1168.788185832" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.982084 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b037c52-40e8-43b9-8e8c-51a19c6f1d8c" path="/var/lib/kubelet/pods/4b037c52-40e8-43b9-8e8c-51a19c6f1d8c/volumes" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.983984 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9bd020e-9e5a-4cef-a33d-e8b254545d46" path="/var/lib/kubelet/pods/e9bd020e-9e5a-4cef-a33d-e8b254545d46/volumes" Oct 07 15:08:11 crc kubenswrapper[4672]: I1007 15:08:11.984629 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9e59740-c3a1-48d6-847a-853862002d42" path="/var/lib/kubelet/pods/f9e59740-c3a1-48d6-847a-853862002d42/volumes" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.292776 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.368341 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmnp9\" (UniqueName: \"kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9\") pod \"8cffe4dc-e78b-4b0e-a3cc-093777856e62\" (UID: \"8cffe4dc-e78b-4b0e-a3cc-093777856e62\") " Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.375504 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9" (OuterVolumeSpecName: "kube-api-access-bmnp9") pod "8cffe4dc-e78b-4b0e-a3cc-093777856e62" (UID: "8cffe4dc-e78b-4b0e-a3cc-093777856e62"). InnerVolumeSpecName "kube-api-access-bmnp9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.474090 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmnp9\" (UniqueName: \"kubernetes.io/projected/8cffe4dc-e78b-4b0e-a3cc-093777856e62-kube-api-access-bmnp9\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.550115 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.550170 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.628457 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.628525 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.766136 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerStarted","Data":"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d"} Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.769508 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8124-account-create-z59nd" event={"ID":"8cffe4dc-e78b-4b0e-a3cc-093777856e62","Type":"ContainerDied","Data":"7f5c834bb2181c7d2b2f27cdfc8b06233e4e2facc4a7ca3a408ad8c6a991fd11"} Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.769546 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f5c834bb2181c7d2b2f27cdfc8b06233e4e2facc4a7ca3a408ad8c6a991fd11" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.769605 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8124-account-create-z59nd" Oct 07 15:08:12 crc kubenswrapper[4672]: I1007 15:08:12.792338 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerStarted","Data":"b44316811ed13bd8cd6d57d88f2234960eaa22263a32bcba2699e0fbebd5b53f"} Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.437264 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.438395 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.528304 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkl5h\" (UniqueName: \"kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h\") pod \"ce3ab335-518f-4c6e-8651-894af34abdd7\" (UID: \"ce3ab335-518f-4c6e-8651-894af34abdd7\") " Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.528565 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bfsn\" (UniqueName: \"kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn\") pod \"3bab44d9-a425-4213-ba3d-58d6a976da25\" (UID: \"3bab44d9-a425-4213-ba3d-58d6a976da25\") " Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.535359 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn" (OuterVolumeSpecName: "kube-api-access-7bfsn") pod "3bab44d9-a425-4213-ba3d-58d6a976da25" (UID: "3bab44d9-a425-4213-ba3d-58d6a976da25"). InnerVolumeSpecName "kube-api-access-7bfsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.535456 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h" (OuterVolumeSpecName: "kube-api-access-hkl5h") pod "ce3ab335-518f-4c6e-8651-894af34abdd7" (UID: "ce3ab335-518f-4c6e-8651-894af34abdd7"). InnerVolumeSpecName "kube-api-access-hkl5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.630235 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bfsn\" (UniqueName: \"kubernetes.io/projected/3bab44d9-a425-4213-ba3d-58d6a976da25-kube-api-access-7bfsn\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.630268 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkl5h\" (UniqueName: \"kubernetes.io/projected/ce3ab335-518f-4c6e-8651-894af34abdd7-kube-api-access-hkl5h\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.805340 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5189-account-create-d5h2g" event={"ID":"3bab44d9-a425-4213-ba3d-58d6a976da25","Type":"ContainerDied","Data":"ea3160597ecb863992cb42b4720dbe9c741dfcd94048eb34d4cc176ee09a1d89"} Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.805392 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea3160597ecb863992cb42b4720dbe9c741dfcd94048eb34d4cc176ee09a1d89" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.805452 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5189-account-create-d5h2g" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.811624 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerStarted","Data":"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649"} Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.815119 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerStarted","Data":"c7f77eb1b8370e6a8572b2ab38b36b61b5604f5c74127e666a98d354b1cf10f8"} Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.823309 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.824102 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-913d-account-create-62bjt" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.824483 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-913d-account-create-62bjt" event={"ID":"ce3ab335-518f-4c6e-8651-894af34abdd7","Type":"ContainerDied","Data":"8a5f5c02171c66fc4050efd478fe5a7e543734a02d05d02c6c77c6d355a5cbed"} Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.824515 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a5f5c02171c66fc4050efd478fe5a7e543734a02d05d02c6c77c6d355a5cbed" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.845191 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.845169898 podStartE2EDuration="4.845169898s" podCreationTimestamp="2025-10-07 15:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:13.832916906 +0000 UTC m=+1170.808095497" watchObservedRunningTime="2025-10-07 15:08:13.845169898 +0000 UTC m=+1170.820348479" Oct 07 15:08:13 crc kubenswrapper[4672]: I1007 15:08:13.878575 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.878554517 podStartE2EDuration="3.878554517s" podCreationTimestamp="2025-10-07 15:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:13.870631029 +0000 UTC m=+1170.845809610" watchObservedRunningTime="2025-10-07 15:08:13.878554517 +0000 UTC m=+1170.853733098" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.052984 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.299505 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-4ss8w"] Oct 07 15:08:14 crc kubenswrapper[4672]: E1007 15:08:14.299890 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cffe4dc-e78b-4b0e-a3cc-093777856e62" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.299914 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cffe4dc-e78b-4b0e-a3cc-093777856e62" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: E1007 15:08:14.299965 
4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce3ab335-518f-4c6e-8651-894af34abdd7" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.299974 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce3ab335-518f-4c6e-8651-894af34abdd7" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: E1007 15:08:14.299990 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bab44d9-a425-4213-ba3d-58d6a976da25" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.299998 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bab44d9-a425-4213-ba3d-58d6a976da25" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.300312 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce3ab335-518f-4c6e-8651-894af34abdd7" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.300340 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cffe4dc-e78b-4b0e-a3cc-093777856e62" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.300363 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bab44d9-a425-4213-ba3d-58d6a976da25" containerName="mariadb-account-create" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.302418 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.304770 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-lvwkv" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.310202 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.312939 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4ss8w"] Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.460373 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9tn5\" (UniqueName: \"kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.460666 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.460738 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.562121 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data\") pod 
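
The paired cpu_manager/memory_manager "RemoveStaleState" lines above fire when the next pod (barbican-db-sync-4ss8w) is admitted: resource-manager checkpoints still holding entries for containers of the three already-deleted account-create pods are purged first. A minimal sketch of that bookkeeping; the map layout and names are assumptions, not kubelet's state_mem.go structures:

    package main

    import "fmt"

    // podUID -> set of container names with a recorded CPU/memory assignment.
    type assignments map[string]map[string]bool

    // removeStaleState mirrors the paired log lines: entries whose pod is no
    // longer active are logged and dropped before the new pod is admitted.
    func removeStaleState(a assignments, activePods map[string]bool) {
        for podUID, containers := range a {
            if activePods[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
                fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n", podUID, name)
            }
            delete(a, podUID) // deleting during range is safe in Go
        }
    }

    func main() {
        a := assignments{
            "8cffe4dc-e78b-4b0e-a3cc-093777856e62": {"mariadb-account-create": true},
            "ce3ab335-518f-4c6e-8651-894af34abdd7": {"mariadb-account-create": true},
            "3bab44d9-a425-4213-ba3d-58d6a976da25": {"mariadb-account-create": true},
        }
        removeStaleState(a, map[string]bool{"1510beef-3f92-4462-90a7-e1b2a92df211": true})
    }
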
\"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.562224 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.562260 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9tn5\" (UniqueName: \"kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.580529 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.581046 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.585971 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9tn5\" (UniqueName: \"kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5\") pod \"barbican-db-sync-4ss8w\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:14 crc kubenswrapper[4672]: I1007 15:08:14.668244 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:15 crc kubenswrapper[4672]: I1007 15:08:15.184268 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4ss8w"] Oct 07 15:08:15 crc kubenswrapper[4672]: I1007 15:08:15.857525 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4ss8w" event={"ID":"1510beef-3f92-4462-90a7-e1b2a92df211","Type":"ContainerStarted","Data":"e9933eefc0863b46baeaa15e235bcf930f043f9cef2457c3805942450e0c9c93"} Oct 07 15:08:16 crc kubenswrapper[4672]: I1007 15:08:16.206354 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:08:16 crc kubenswrapper[4672]: I1007 15:08:16.868287 4672 generic.go:334] "Generic (PLEG): container finished" podID="e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" containerID="bd18ea38c2e6d705d39b63cbf64b2d7043480f4e99d32e92a6091f4966d691e2" exitCode=0 Oct 07 15:08:16 crc kubenswrapper[4672]: I1007 15:08:16.868351 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lsj7z" event={"ID":"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b","Type":"ContainerDied","Data":"bd18ea38c2e6d705d39b63cbf64b2d7043480f4e99d32e92a6091f4966d691e2"} Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.619788 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lsj7z" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.749369 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.749701 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.749811 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.749845 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.749958 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcd9p\" (UniqueName: \"kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.750073 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data\") pod \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\" (UID: \"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b\") " Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.756322 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p" (OuterVolumeSpecName: "kube-api-access-jcd9p") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "kube-api-access-jcd9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.756402 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts" (OuterVolumeSpecName: "scripts") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.760092 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.781710 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.788264 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data" (OuterVolumeSpecName: "config-data") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.813113 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" (UID: "e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855221 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcd9p\" (UniqueName: \"kubernetes.io/projected/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-kube-api-access-jcd9p\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855264 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855277 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855289 4672 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855301 4672 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.855312 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.900222 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lsj7z" event={"ID":"e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b","Type":"ContainerDied","Data":"f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b"} Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.900274 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f19060b4ed24a4ec64618abdd60a84612e19f487756293d04cade2368100707b" Oct 07 15:08:18 crc kubenswrapper[4672]: I1007 15:08:18.900350 4672 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lsj7z" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.037756 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5858475b48-6sdfp"] Oct 07 15:08:19 crc kubenswrapper[4672]: E1007 15:08:19.038164 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" containerName="keystone-bootstrap" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.038183 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" containerName="keystone-bootstrap" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.038393 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" containerName="keystone-bootstrap" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.038979 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.042947 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.043130 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rtlqr" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.043205 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.043307 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.043372 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.048730 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.062899 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5858475b48-6sdfp"] Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.161982 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-public-tls-certs\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162178 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-scripts\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162236 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb49n\" (UniqueName: \"kubernetes.io/projected/718ff39a-552f-48a0-91f4-c9fa07c17b7d-kube-api-access-bb49n\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162265 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-config-data\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162335 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-fernet-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162491 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-combined-ca-bundle\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162542 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-internal-tls-certs\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.162625 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-credential-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264118 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-fernet-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264186 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-combined-ca-bundle\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264206 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-internal-tls-certs\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264235 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-credential-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264308 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-public-tls-certs\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264335 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-scripts\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264356 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb49n\" (UniqueName: \"kubernetes.io/projected/718ff39a-552f-48a0-91f4-c9fa07c17b7d-kube-api-access-bb49n\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.264374 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-config-data\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.272450 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-internal-tls-certs\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.283643 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-scripts\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.284108 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-fernet-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.286645 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-config-data\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.291943 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-credential-keys\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.295237 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-public-tls-certs\") pod \"keystone-5858475b48-6sdfp\" 
(UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.298531 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb49n\" (UniqueName: \"kubernetes.io/projected/718ff39a-552f-48a0-91f4-c9fa07c17b7d-kube-api-access-bb49n\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.299113 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718ff39a-552f-48a0-91f4-c9fa07c17b7d-combined-ca-bundle\") pod \"keystone-5858475b48-6sdfp\" (UID: \"718ff39a-552f-48a0-91f4-c9fa07c17b7d\") " pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.366288 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.413840 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-2j8d8"] Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.427225 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.431001 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.431265 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-cxgrr" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.434002 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.472054 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2j8d8"] Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574151 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574233 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574262 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574278 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " 
pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574305 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.574359 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b268t\" (UniqueName: \"kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675384 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675468 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b268t\" (UniqueName: \"kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675526 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675571 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675597 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675612 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.675682 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.680429 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.686370 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.688533 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.701525 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.701938 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kqgxx"] Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.710086 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b268t\" (UniqueName: \"kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t\") pod \"cinder-db-sync-2j8d8\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.716463 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.727163 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.727364 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.727466 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dr29t" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.752308 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kqgxx"] Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.760496 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.780257 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxlf4\" (UniqueName: \"kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.780323 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.780365 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.886985 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.887077 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.887184 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxlf4\" (UniqueName: \"kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.892179 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.916123 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:19 crc kubenswrapper[4672]: I1007 15:08:19.921376 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxlf4\" (UniqueName: \"kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4\") pod \"neutron-db-sync-kqgxx\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.151471 4672 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.316182 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.317234 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.357575 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.379535 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.395230 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.395274 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.431344 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.443801 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.938847 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.938881 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.938891 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 15:08:20 crc kubenswrapper[4672]: I1007 15:08:20.938979 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.553225 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.629790 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-694678548d-bbtxt" podUID="d3c1e726-5fce-4f95-952f-effb9a8993f3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.974065 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.974106 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.974370 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:08:22 crc kubenswrapper[4672]: I1007 15:08:22.974388 4672 
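
The horizon startup-probe failures above are plain HTTPS GETs against /dashboard/auth/login/ that the restarting containers are not yet serving; kubelet treats any transport error (here "connect: connection refused") or a status outside 200-399 as a failed probe. A rough stand-alone equivalent of a single attempt; the URL is taken from the log, while the one-second timeout and the TLS settings are assumptions:

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    // probeOnce performs one HTTP-GET style probe check: a transport error or
    // a status outside 200-399 counts as a failure.
    func probeOnce(url string) error {
        client := &http.Client{
            Timeout: time.Second, // probe timeoutSeconds (assumed value)
            Transport: &http.Transport{
                // Kubelet's HTTPS probes do not verify the serving certificate.
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "dial tcp 10.217.0.142:8443: connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        url := "https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/"
        if err := probeOnce(url); err != nil {
            fmt.Println(`Probe failed probeType="Startup" output:`, err)
        }
    }
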
prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:08:23 crc kubenswrapper[4672]: I1007 15:08:23.329767 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 15:08:23 crc kubenswrapper[4672]: I1007 15:08:23.363327 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:23 crc kubenswrapper[4672]: I1007 15:08:23.364232 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 15:08:23 crc kubenswrapper[4672]: I1007 15:08:23.983487 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:08:24 crc kubenswrapper[4672]: I1007 15:08:24.072286 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 15:08:25 crc kubenswrapper[4672]: I1007 15:08:25.675006 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5858475b48-6sdfp"] Oct 07 15:08:25 crc kubenswrapper[4672]: I1007 15:08:25.773930 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2j8d8"] Oct 07 15:08:25 crc kubenswrapper[4672]: I1007 15:08:25.789272 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kqgxx"] Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.013614 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5858475b48-6sdfp" event={"ID":"718ff39a-552f-48a0-91f4-c9fa07c17b7d","Type":"ContainerStarted","Data":"5c32146a6947cf9f52060b9b1a386f4d22ba67143495abdfc31c31c6b9348ff1"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.013669 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5858475b48-6sdfp" event={"ID":"718ff39a-552f-48a0-91f4-c9fa07c17b7d","Type":"ContainerStarted","Data":"81f7b6261c8c22cf5367d8375d498de40cf75d4103dab17e6105a783d0614824"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.015443 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.022875 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lzmcv" event={"ID":"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017","Type":"ContainerStarted","Data":"ce7a7e486044de9cef0757c05694a39f331e017ca86ae3981012256e0b58fb74"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.028462 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerStarted","Data":"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.045254 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5858475b48-6sdfp" podStartSLOduration=7.045239323 podStartE2EDuration="7.045239323s" podCreationTimestamp="2025-10-07 15:08:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:26.043181734 +0000 UTC m=+1183.018360325" watchObservedRunningTime="2025-10-07 15:08:26.045239323 +0000 UTC m=+1183.020417904" Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.058262 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kqgxx" 
event={"ID":"62307f94-c8e8-4781-a675-0951f6e1d797","Type":"ContainerStarted","Data":"1a4e5673d6583ad7186236bcbf7333a925a476d5324cd4f5658e8d2c1e5d8fbb"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.072956 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-lzmcv" podStartSLOduration=3.3684253379999998 podStartE2EDuration="33.072939388s" podCreationTimestamp="2025-10-07 15:07:53 +0000 UTC" firstStartedPulling="2025-10-07 15:07:55.454354145 +0000 UTC m=+1152.429532726" lastFinishedPulling="2025-10-07 15:08:25.158868195 +0000 UTC m=+1182.134046776" observedRunningTime="2025-10-07 15:08:26.069108958 +0000 UTC m=+1183.044287539" watchObservedRunningTime="2025-10-07 15:08:26.072939388 +0000 UTC m=+1183.048117969" Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.073302 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2j8d8" event={"ID":"1d702410-d569-45e2-9f92-b8e0c7a0fd9d","Type":"ContainerStarted","Data":"317c2370a8392259342bee7c8d70e2ff0c3fc81cc68632f544cbe4a1a02b76f0"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.089012 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4ss8w" event={"ID":"1510beef-3f92-4462-90a7-e1b2a92df211","Type":"ContainerStarted","Data":"9e8c3b07eb4f9979b6ff8a8d1b44efa11edb03adfb18e7e22b48a3b8a0aab6b7"} Oct 07 15:08:26 crc kubenswrapper[4672]: I1007 15:08:26.109872 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-4ss8w" podStartSLOduration=2.112570722 podStartE2EDuration="12.109853079s" podCreationTimestamp="2025-10-07 15:08:14 +0000 UTC" firstStartedPulling="2025-10-07 15:08:15.201919356 +0000 UTC m=+1172.177097937" lastFinishedPulling="2025-10-07 15:08:25.199201713 +0000 UTC m=+1182.174380294" observedRunningTime="2025-10-07 15:08:26.107755808 +0000 UTC m=+1183.082934389" watchObservedRunningTime="2025-10-07 15:08:26.109853079 +0000 UTC m=+1183.085031660" Oct 07 15:08:27 crc kubenswrapper[4672]: I1007 15:08:27.104770 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kqgxx" event={"ID":"62307f94-c8e8-4781-a675-0951f6e1d797","Type":"ContainerStarted","Data":"e65a67703af4d8814c91d9faab26e202d32eb34eaac035fb55fbafa0fb73452f"} Oct 07 15:08:27 crc kubenswrapper[4672]: I1007 15:08:27.128710 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-kqgxx" podStartSLOduration=8.128660831 podStartE2EDuration="8.128660831s" podCreationTimestamp="2025-10-07 15:08:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:27.118436927 +0000 UTC m=+1184.093615508" watchObservedRunningTime="2025-10-07 15:08:27.128660831 +0000 UTC m=+1184.103839412" Oct 07 15:08:29 crc kubenswrapper[4672]: I1007 15:08:29.132532 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4ss8w" event={"ID":"1510beef-3f92-4462-90a7-e1b2a92df211","Type":"ContainerDied","Data":"9e8c3b07eb4f9979b6ff8a8d1b44efa11edb03adfb18e7e22b48a3b8a0aab6b7"} Oct 07 15:08:29 crc kubenswrapper[4672]: I1007 15:08:29.132614 4672 generic.go:334] "Generic (PLEG): container finished" podID="1510beef-3f92-4462-90a7-e1b2a92df211" containerID="9e8c3b07eb4f9979b6ff8a8d1b44efa11edb03adfb18e7e22b48a3b8a0aab6b7" exitCode=0 Oct 07 15:08:29 crc kubenswrapper[4672]: I1007 15:08:29.135233 4672 generic.go:334] "Generic (PLEG): 
container finished" podID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" containerID="ce7a7e486044de9cef0757c05694a39f331e017ca86ae3981012256e0b58fb74" exitCode=0 Oct 07 15:08:29 crc kubenswrapper[4672]: I1007 15:08:29.135266 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lzmcv" event={"ID":"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017","Type":"ContainerDied","Data":"ce7a7e486044de9cef0757c05694a39f331e017ca86ae3981012256e0b58fb74"} Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.551095 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.628592 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-694678548d-bbtxt" podUID="d3c1e726-5fce-4f95-952f-effb9a8993f3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.694334 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lzmcv" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.788673 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8g8b\" (UniqueName: \"kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b\") pod \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.788803 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs\") pod \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.789262 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle\") pod \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.789442 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs" (OuterVolumeSpecName: "logs") pod "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" (UID: "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.790091 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts\") pod \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.790152 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data\") pod \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\" (UID: \"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017\") " Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.791067 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.797156 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b" (OuterVolumeSpecName: "kube-api-access-l8g8b") pod "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" (UID: "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017"). InnerVolumeSpecName "kube-api-access-l8g8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.811131 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts" (OuterVolumeSpecName: "scripts") pod "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" (UID: "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.823847 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data" (OuterVolumeSpecName: "config-data") pod "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" (UID: "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.837182 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" (UID: "8c3d2245-20b7-4ef1-a2ba-9548d4c5d017"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.892930 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8g8b\" (UniqueName: \"kubernetes.io/projected/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-kube-api-access-l8g8b\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.892970 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.892983 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:32 crc kubenswrapper[4672]: I1007 15:08:32.892993 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.172323 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lzmcv" event={"ID":"8c3d2245-20b7-4ef1-a2ba-9548d4c5d017","Type":"ContainerDied","Data":"e397c78d10d3edf75ff896d5070aa79bd0bfe32a95cc95743b15528bb2383525"} Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.172365 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e397c78d10d3edf75ff896d5070aa79bd0bfe32a95cc95743b15528bb2383525" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.172423 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lzmcv" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.807012 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-78768fbc98-wqtlt"] Oct 07 15:08:33 crc kubenswrapper[4672]: E1007 15:08:33.807438 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" containerName="placement-db-sync" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.807449 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" containerName="placement-db-sync" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.807619 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" containerName="placement-db-sync" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.808547 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.814391 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.814660 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.814765 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.815251 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-hb27h" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.815572 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.822462 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-78768fbc98-wqtlt"] Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.917907 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-scripts\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918056 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-combined-ca-bundle\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918108 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tjdt\" (UniqueName: \"kubernetes.io/projected/8ee6c49e-c514-4798-ab81-0bc31c59048d-kube-api-access-6tjdt\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-config-data\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918177 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-internal-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918195 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ee6c49e-c514-4798-ab81-0bc31c59048d-logs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:33 crc kubenswrapper[4672]: I1007 15:08:33.918218 4672 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-public-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021086 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-public-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021253 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-scripts\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021378 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-combined-ca-bundle\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021437 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tjdt\" (UniqueName: \"kubernetes.io/projected/8ee6c49e-c514-4798-ab81-0bc31c59048d-kube-api-access-6tjdt\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021494 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-config-data\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021561 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-internal-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.021591 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ee6c49e-c514-4798-ab81-0bc31c59048d-logs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.022547 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ee6c49e-c514-4798-ab81-0bc31c59048d-logs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.025043 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-public-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.032695 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-internal-tls-certs\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.035901 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-config-data\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.040122 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-scripts\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.046443 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee6c49e-c514-4798-ab81-0bc31c59048d-combined-ca-bundle\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.046983 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tjdt\" (UniqueName: \"kubernetes.io/projected/8ee6c49e-c514-4798-ab81-0bc31c59048d-kube-api-access-6tjdt\") pod \"placement-78768fbc98-wqtlt\" (UID: \"8ee6c49e-c514-4798-ab81-0bc31c59048d\") " pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:34 crc kubenswrapper[4672]: I1007 15:08:34.138327 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:39 crc kubenswrapper[4672]: I1007 15:08:39.998734 4672 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podde394e3e-48ba-4410-8fb2-a99e03f27ac9"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podde394e3e-48ba-4410-8fb2-a99e03f27ac9] : Timed out while waiting for systemd to remove kubepods-besteffort-podde394e3e_48ba_4410_8fb2_a99e03f27ac9.slice" Oct 07 15:08:40 crc kubenswrapper[4672]: E1007 15:08:39.999513 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podde394e3e-48ba-4410-8fb2-a99e03f27ac9] : unable to destroy cgroup paths for cgroup [kubepods besteffort podde394e3e-48ba-4410-8fb2-a99e03f27ac9] : Timed out while waiting for systemd to remove kubepods-besteffort-podde394e3e_48ba_4410_8fb2_a99e03f27ac9.slice" pod="openstack/dnsmasq-dns-5988746689-84wxb" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" Oct 07 15:08:40 crc kubenswrapper[4672]: I1007 15:08:40.244426 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5988746689-84wxb" Oct 07 15:08:40 crc kubenswrapper[4672]: I1007 15:08:40.275956 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:08:40 crc kubenswrapper[4672]: I1007 15:08:40.282934 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5988746689-84wxb"] Oct 07 15:08:41 crc kubenswrapper[4672]: I1007 15:08:41.278264 4672 generic.go:334] "Generic (PLEG): container finished" podID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerID="b69fa2adc76165439524c5edcd2cdf714c8f2f6e20caf17158f4b5141adfa2b5" exitCode=137 Oct 07 15:08:41 crc kubenswrapper[4672]: I1007 15:08:41.278662 4672 generic.go:334] "Generic (PLEG): container finished" podID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerID="e2ffa69194e8cdaf98a5bc6c34739cd71dbd67aa5ccab0b47cd75822d7680dec" exitCode=137 Oct 07 15:08:41 crc kubenswrapper[4672]: I1007 15:08:41.278511 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerDied","Data":"b69fa2adc76165439524c5edcd2cdf714c8f2f6e20caf17158f4b5141adfa2b5"} Oct 07 15:08:41 crc kubenswrapper[4672]: I1007 15:08:41.278702 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerDied","Data":"e2ffa69194e8cdaf98a5bc6c34739cd71dbd67aa5ccab0b47cd75822d7680dec"} Oct 07 15:08:41 crc kubenswrapper[4672]: I1007 15:08:41.904812 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de394e3e-48ba-4410-8fb2-a99e03f27ac9" path="/var/lib/kubelet/pods/de394e3e-48ba-4410-8fb2-a99e03f27ac9/volumes" Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.293518 4672 generic.go:334] "Generic (PLEG): container finished" podID="05eff0ec-546c-4c65-95da-51a499040af2" containerID="a0f8e90e22fa289c6355eeebf4e277ecc3be56cdcc7c15e3d0b962fdc6de8090" exitCode=137 Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.293971 4672 generic.go:334] "Generic (PLEG): container finished" podID="05eff0ec-546c-4c65-95da-51a499040af2" containerID="17ad6da9eb58157ef7daa0f3c7349469bee4d16cfa20b1fd2203dd5c3e39c216" exitCode=137 Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.294067 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerDied","Data":"a0f8e90e22fa289c6355eeebf4e277ecc3be56cdcc7c15e3d0b962fdc6de8090"} Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.294105 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerDied","Data":"17ad6da9eb58157ef7daa0f3c7349469bee4d16cfa20b1fd2203dd5c3e39c216"} Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.296872 4672 generic.go:334] "Generic (PLEG): container finished" podID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerID="50cc0512e53bcab239f8340589616507cfe294f794a7ce10fd5eb9a7553d3527" exitCode=137 Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.296916 4672 generic.go:334] "Generic (PLEG): container finished" podID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerID="8e8584976358a6e50ddc0fc90a3a450f81a6e29d6a48d729a0aa8754312b31d8" exitCode=137 Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.296944 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerDied","Data":"50cc0512e53bcab239f8340589616507cfe294f794a7ce10fd5eb9a7553d3527"} Oct 07 15:08:42 crc kubenswrapper[4672]: I1007 15:08:42.297007 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerDied","Data":"8e8584976358a6e50ddc0fc90a3a450f81a6e29d6a48d729a0aa8754312b31d8"} Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.315556 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.320775 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4ss8w" event={"ID":"1510beef-3f92-4462-90a7-e1b2a92df211","Type":"ContainerDied","Data":"e9933eefc0863b46baeaa15e235bcf930f043f9cef2457c3805942450e0c9c93"} Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.320808 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9933eefc0863b46baeaa15e235bcf930f043f9cef2457c3805942450e0c9c93" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.320817 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4ss8w" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.438878 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data\") pod \"1510beef-3f92-4462-90a7-e1b2a92df211\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.439162 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle\") pod \"1510beef-3f92-4462-90a7-e1b2a92df211\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.439201 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9tn5\" (UniqueName: \"kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5\") pod \"1510beef-3f92-4462-90a7-e1b2a92df211\" (UID: \"1510beef-3f92-4462-90a7-e1b2a92df211\") " Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.446218 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1510beef-3f92-4462-90a7-e1b2a92df211" (UID: "1510beef-3f92-4462-90a7-e1b2a92df211"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.462669 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5" (OuterVolumeSpecName: "kube-api-access-r9tn5") pod "1510beef-3f92-4462-90a7-e1b2a92df211" (UID: "1510beef-3f92-4462-90a7-e1b2a92df211"). InnerVolumeSpecName "kube-api-access-r9tn5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.465732 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.469308 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1510beef-3f92-4462-90a7-e1b2a92df211" (UID: "1510beef-3f92-4462-90a7-e1b2a92df211"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.516807 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.541613 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.541655 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9tn5\" (UniqueName: \"kubernetes.io/projected/1510beef-3f92-4462-90a7-e1b2a92df211-kube-api-access-r9tn5\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:44 crc kubenswrapper[4672]: I1007 15:08:44.541670 4672 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1510beef-3f92-4462-90a7-e1b2a92df211-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.599756 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-f4cdcdbb-4hkrg"] Oct 07 15:08:45 crc kubenswrapper[4672]: E1007 15:08:45.600865 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1510beef-3f92-4462-90a7-e1b2a92df211" containerName="barbican-db-sync" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.600888 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1510beef-3f92-4462-90a7-e1b2a92df211" containerName="barbican-db-sync" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.605483 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1510beef-3f92-4462-90a7-e1b2a92df211" containerName="barbican-db-sync" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.606987 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.618273 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.618405 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-lvwkv" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.619035 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.630883 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-fbf4c46f7-rxrcl"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.632535 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.641872 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.652447 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fbf4c46f7-rxrcl"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.665950 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666005 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dff22\" (UniqueName: \"kubernetes.io/projected/dd7a9076-c512-4e9c-b626-482122ee920b-kube-api-access-dff22\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666112 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84rfl\" (UniqueName: \"kubernetes.io/projected/79138985-a31b-43fd-aec5-cfd8abf453d1-kube-api-access-84rfl\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666148 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data-custom\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666166 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-combined-ca-bundle\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666193 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data-custom\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666229 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79138985-a31b-43fd-aec5-cfd8abf453d1-logs\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666246 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7a9076-c512-4e9c-b626-482122ee920b-logs\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666278 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.666313 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-combined-ca-bundle\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.677275 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-f4cdcdbb-4hkrg"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.731361 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.733181 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.746541 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768513 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-combined-ca-bundle\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768576 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768603 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dff22\" (UniqueName: \"kubernetes.io/projected/dd7a9076-c512-4e9c-b626-482122ee920b-kube-api-access-dff22\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768620 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84rfl\" (UniqueName: \"kubernetes.io/projected/79138985-a31b-43fd-aec5-cfd8abf453d1-kube-api-access-84rfl\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768648 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data-custom\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768666 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-combined-ca-bundle\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768692 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data-custom\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768727 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79138985-a31b-43fd-aec5-cfd8abf453d1-logs\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768746 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7a9076-c512-4e9c-b626-482122ee920b-logs\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.768771 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.781561 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79138985-a31b-43fd-aec5-cfd8abf453d1-logs\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.782254 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7a9076-c512-4e9c-b626-482122ee920b-logs\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.782585 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.783291 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-combined-ca-bundle\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.784939 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data-custom\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.801189 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-combined-ca-bundle\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.802056 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a9076-c512-4e9c-b626-482122ee920b-config-data\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.802904 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79138985-a31b-43fd-aec5-cfd8abf453d1-config-data-custom\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.805467 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84rfl\" (UniqueName: \"kubernetes.io/projected/79138985-a31b-43fd-aec5-cfd8abf453d1-kube-api-access-84rfl\") pod \"barbican-worker-fbf4c46f7-rxrcl\" (UID: \"79138985-a31b-43fd-aec5-cfd8abf453d1\") " pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.816615 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dff22\" (UniqueName: \"kubernetes.io/projected/dd7a9076-c512-4e9c-b626-482122ee920b-kube-api-access-dff22\") pod \"barbican-keystone-listener-f4cdcdbb-4hkrg\" (UID: \"dd7a9076-c512-4e9c-b626-482122ee920b\") " pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.872765 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.873179 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74rcs\" (UniqueName: \"kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc 
kubenswrapper[4672]: I1007 15:08:45.873333 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.873562 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.873806 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.873950 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.891005 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.892551 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.895981 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.918531 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.950210 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975036 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975555 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975601 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975635 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr9mg\" (UniqueName: \"kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975873 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975903 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74rcs\" (UniqueName: \"kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975933 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975967 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.975993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: 
\"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.976128 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.976155 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.976715 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.977179 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.977351 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.977606 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.977629 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.993174 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" Oct 07 15:08:45 crc kubenswrapper[4672]: I1007 15:08:45.993921 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74rcs\" (UniqueName: \"kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs\") pod \"dnsmasq-dns-7b7dc7f747-w25nx\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.054468 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.077531 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.077845 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.078007 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr9mg\" (UniqueName: \"kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.078288 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.078453 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.078524 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.084332 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.085028 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.087525 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc 
kubenswrapper[4672]: I1007 15:08:46.099630 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr9mg\" (UniqueName: \"kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg\") pod \"barbican-api-657f8cc4c6-kbnsj\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.225278 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.379896 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.461669 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-694678548d-bbtxt" Oct 07 15:08:46 crc kubenswrapper[4672]: I1007 15:08:46.573976 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"] Oct 07 15:08:47 crc kubenswrapper[4672]: I1007 15:08:47.349568 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon-log" containerID="cri-o://3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd" gracePeriod=30 Oct 07 15:08:47 crc kubenswrapper[4672]: I1007 15:08:47.350055 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" containerID="cri-o://f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc" gracePeriod=30 Oct 07 15:08:47 crc kubenswrapper[4672]: E1007 15:08:47.552273 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-cinder-api:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:47 crc kubenswrapper[4672]: E1007 15:08:47.552826 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-cinder-api:b78cfc68a577b1553523c8a70a34e297" Oct 07 15:08:47 crc kubenswrapper[4672]: E1007 15:08:47.552937 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-cinder-api:b78cfc68a577b1553523c8a70a34e297,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b268t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-2j8d8_openstack(1d702410-d569-45e2-9f92-b8e0c7a0fd9d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 15:08:47 crc kubenswrapper[4672]: E1007 15:08:47.554963 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-2j8d8" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" Oct 07 15:08:47 crc kubenswrapper[4672]: E1007 15:08:47.897744 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.042407 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.050245 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.058452 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134256 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts\") pod \"05eff0ec-546c-4c65-95da-51a499040af2\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134303 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data\") pod \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134421 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key\") pod \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134560 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs\") pod \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134617 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk9ll\" (UniqueName: \"kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll\") pod \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134665 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts\") pod \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134711 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data\") pod \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134758 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key\") pod \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134790 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpz7x\" (UniqueName: \"kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x\") pod \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\" (UID: \"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134859 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key\") pod \"05eff0ec-546c-4c65-95da-51a499040af2\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134919 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs\") pod \"05eff0ec-546c-4c65-95da-51a499040af2\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134949 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data\") pod \"05eff0ec-546c-4c65-95da-51a499040af2\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.134994 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cjlb\" (UniqueName: \"kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb\") pod \"05eff0ec-546c-4c65-95da-51a499040af2\" (UID: \"05eff0ec-546c-4c65-95da-51a499040af2\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.135047 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts\") pod \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.135077 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs\") pod \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\" (UID: \"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b\") " Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.136553 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs" (OuterVolumeSpecName: "logs") pod "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" (UID: "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.137011 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs" (OuterVolumeSpecName: "logs") pod "05eff0ec-546c-4c65-95da-51a499040af2" (UID: "05eff0ec-546c-4c65-95da-51a499040af2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.137445 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs" (OuterVolumeSpecName: "logs") pod "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" (UID: "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.144797 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll" (OuterVolumeSpecName: "kube-api-access-jk9ll") pod "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" (UID: "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b"). 
InnerVolumeSpecName "kube-api-access-jk9ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.150624 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" (UID: "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.151274 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x" (OuterVolumeSpecName: "kube-api-access-vpz7x") pod "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" (UID: "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d"). InnerVolumeSpecName "kube-api-access-vpz7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.153615 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb" (OuterVolumeSpecName: "kube-api-access-5cjlb") pod "05eff0ec-546c-4c65-95da-51a499040af2" (UID: "05eff0ec-546c-4c65-95da-51a499040af2"). InnerVolumeSpecName "kube-api-access-5cjlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.153704 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" (UID: "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.166427 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "05eff0ec-546c-4c65-95da-51a499040af2" (UID: "05eff0ec-546c-4c65-95da-51a499040af2"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.176424 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts" (OuterVolumeSpecName: "scripts") pod "05eff0ec-546c-4c65-95da-51a499040af2" (UID: "05eff0ec-546c-4c65-95da-51a499040af2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.183602 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data" (OuterVolumeSpecName: "config-data") pod "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" (UID: "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.206398 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data" (OuterVolumeSpecName: "config-data") pod "05eff0ec-546c-4c65-95da-51a499040af2" (UID: "05eff0ec-546c-4c65-95da-51a499040af2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.207340 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts" (OuterVolumeSpecName: "scripts") pod "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" (UID: "7aa0b1b9-35ce-47c6-859d-86fdf4138b2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.210509 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts" (OuterVolumeSpecName: "scripts") pod "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" (UID: "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.229416 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data" (OuterVolumeSpecName: "config-data") pod "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" (UID: "156a1f6f-2aeb-46e8-98b7-3bb2cee4128b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.237886 4672 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.237953 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.237966 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk9ll\" (UniqueName: \"kubernetes.io/projected/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-kube-api-access-jk9ll\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238003 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238048 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238059 4672 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238069 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpz7x\" (UniqueName: 
\"kubernetes.io/projected/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-kube-api-access-vpz7x\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238078 4672 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/05eff0ec-546c-4c65-95da-51a499040af2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238087 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eff0ec-546c-4c65-95da-51a499040af2-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238094 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238126 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cjlb\" (UniqueName: \"kubernetes.io/projected/05eff0ec-546c-4c65-95da-51a499040af2-kube-api-access-5cjlb\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238137 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238145 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238153 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05eff0ec-546c-4c65-95da-51a499040af2-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.238161 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.361255 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-796c559987-ttpm4" event={"ID":"7aa0b1b9-35ce-47c6-859d-86fdf4138b2d","Type":"ContainerDied","Data":"9aeba4a70e77476ab36dba5e5b8db11a88b16580adf38bb57d70f43e7cca5b40"} Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.361322 4672 scope.go:117] "RemoveContainer" containerID="b69fa2adc76165439524c5edcd2cdf714c8f2f6e20caf17158f4b5141adfa2b5" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.361370 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-796c559987-ttpm4" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.366692 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66c969bf9c-mwvhc" event={"ID":"05eff0ec-546c-4c65-95da-51a499040af2","Type":"ContainerDied","Data":"4a2d0764c161b0e6c1a0d5824f3ac115553a36000c2ef3aa5f76c408282def99"} Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.366710 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-66c969bf9c-mwvhc" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.371038 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerStarted","Data":"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99"} Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.371214 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="ceilometer-notification-agent" containerID="cri-o://f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44" gracePeriod=30 Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.371470 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.375243 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="proxy-httpd" containerID="cri-o://5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99" gracePeriod=30 Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.375344 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="sg-core" containerID="cri-o://01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01" gracePeriod=30 Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.382029 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5fcb5df989-stdqv" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.382123 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fcb5df989-stdqv" event={"ID":"156a1f6f-2aeb-46e8-98b7-3bb2cee4128b","Type":"ContainerDied","Data":"f5c0c1a9e5e15a611be7ca9779d220eba85615361ea0152b8c0eec1968a285cf"} Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.409767 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fbf4c46f7-rxrcl"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.431410 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-f4cdcdbb-4hkrg"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.441257 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.449254 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.458570 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5fcb5df989-stdqv"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.465162 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.474097 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-78768fbc98-wqtlt"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.484958 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.494376 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-66c969bf9c-mwvhc"] Oct 07 15:08:48 
crc kubenswrapper[4672]: I1007 15:08:48.519134 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.525445 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-796c559987-ttpm4"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.576562 4672 scope.go:117] "RemoveContainer" containerID="e2ffa69194e8cdaf98a5bc6c34739cd71dbd67aa5ccab0b47cd75822d7680dec" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.576626 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.103:5001/podified-antelope-centos9/openstack-cinder-api:b78cfc68a577b1553523c8a70a34e297\\\"\"" pod="openstack/cinder-db-sync-2j8d8" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.630088 4672 scope.go:117] "RemoveContainer" containerID="a0f8e90e22fa289c6355eeebf4e277ecc3be56cdcc7c15e3d0b962fdc6de8090" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.645856 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6d9d7bb546-5l2xz"] Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646244 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646260 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646296 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646303 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646312 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646318 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646330 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646336 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646344 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646351 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: E1007 15:08:48.646362 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646368 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon-log" Oct 
07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646538 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646549 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646558 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646571 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon-log" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646581 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.646601 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="05eff0ec-546c-4c65-95da-51a499040af2" containerName="horizon" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.647565 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.649804 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.649957 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.676261 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d9d7bb546-5l2xz"] Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.747278 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-logs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.747657 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-combined-ca-bundle\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.747815 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-public-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.747846 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.748035 4672 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-internal-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.748132 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data-custom\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.748156 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcvzw\" (UniqueName: \"kubernetes.io/projected/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-kube-api-access-lcvzw\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850260 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-internal-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850339 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data-custom\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850372 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcvzw\" (UniqueName: \"kubernetes.io/projected/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-kube-api-access-lcvzw\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850425 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-logs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850474 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-combined-ca-bundle\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850515 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-public-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 
15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.850552 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.851555 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-logs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.853121 4672 scope.go:117] "RemoveContainer" containerID="17ad6da9eb58157ef7daa0f3c7349469bee4d16cfa20b1fd2203dd5c3e39c216" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.854905 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-public-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.855993 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-combined-ca-bundle\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.856270 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.856464 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-config-data-custom\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.857647 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-internal-tls-certs\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.869781 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcvzw\" (UniqueName: \"kubernetes.io/projected/a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4-kube-api-access-lcvzw\") pod \"barbican-api-6d9d7bb546-5l2xz\" (UID: \"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4\") " pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.921299 4672 scope.go:117] "RemoveContainer" containerID="50cc0512e53bcab239f8340589616507cfe294f794a7ce10fd5eb9a7553d3527" Oct 07 15:08:48 crc kubenswrapper[4672]: I1007 15:08:48.983561 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.135111 4672 scope.go:117] "RemoveContainer" containerID="8e8584976358a6e50ddc0fc90a3a450f81a6e29d6a48d729a0aa8754312b31d8" Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.414422 4672 generic.go:334] "Generic (PLEG): container finished" podID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerID="5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99" exitCode=0 Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.414749 4672 generic.go:334] "Generic (PLEG): container finished" podID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerID="01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01" exitCode=2 Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.414855 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerDied","Data":"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.414890 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerDied","Data":"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.420960 4672 generic.go:334] "Generic (PLEG): container finished" podID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerID="c676ab2be5745d30263bb09eaf19a53e124459767b8c23b59f5352a1ae2b6439" exitCode=0 Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.421068 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" event={"ID":"b0ca4fb8-e017-4010-ac17-07820394dc2b","Type":"ContainerDied","Data":"c676ab2be5745d30263bb09eaf19a53e124459767b8c23b59f5352a1ae2b6439"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.421107 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" event={"ID":"b0ca4fb8-e017-4010-ac17-07820394dc2b","Type":"ContainerStarted","Data":"6e94d642f73c4d05026c8b014679ce6b76183a88d3b962a261ead26dffc38b1f"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.426313 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" event={"ID":"79138985-a31b-43fd-aec5-cfd8abf453d1","Type":"ContainerStarted","Data":"5e5800dc6f6e441f64810a0eda17f6ca4d7a3b37188af691a966038812decabc"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.429270 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" event={"ID":"dd7a9076-c512-4e9c-b626-482122ee920b","Type":"ContainerStarted","Data":"7a6dfe6a5e2f0ca90b17fbf1a45d4ce0ee9b07ea603a1fc89c5723f3f65e5d54"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.436247 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerStarted","Data":"f35c2319d79cee433b52a65e71334c1dcba7f4dddfae23a406d73563027b24ff"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.436292 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerStarted","Data":"a32c70228295c3f680b0027a8ff65803aca624a17d1df38c5c8cb782afd90d81"} Oct 07 15:08:49 crc 
kubenswrapper[4672]: I1007 15:08:49.445344 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78768fbc98-wqtlt" event={"ID":"8ee6c49e-c514-4798-ab81-0bc31c59048d","Type":"ContainerStarted","Data":"d61a3f06ba6837f0c3763bc2f01eb440444dde934b4ff1f6a99cf2aa3330f14c"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.445413 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78768fbc98-wqtlt" event={"ID":"8ee6c49e-c514-4798-ab81-0bc31c59048d","Type":"ContainerStarted","Data":"0e357b0e367eb7b424450330a1dc4e62eede5ee87499b348085285314e831cfb"} Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.606938 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d9d7bb546-5l2xz"] Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.909946 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05eff0ec-546c-4c65-95da-51a499040af2" path="/var/lib/kubelet/pods/05eff0ec-546c-4c65-95da-51a499040af2/volumes" Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.911112 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="156a1f6f-2aeb-46e8-98b7-3bb2cee4128b" path="/var/lib/kubelet/pods/156a1f6f-2aeb-46e8-98b7-3bb2cee4128b/volumes" Oct 07 15:08:49 crc kubenswrapper[4672]: I1007 15:08:49.912346 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aa0b1b9-35ce-47c6-859d-86fdf4138b2d" path="/var/lib/kubelet/pods/7aa0b1b9-35ce-47c6-859d-86fdf4138b2d/volumes" Oct 07 15:08:49 crc kubenswrapper[4672]: W1007 15:08:49.997991 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9c5aa28_0ac6_4815_aecb_a8bfe04f2bb4.slice/crio-aca46f8aba159aa3275b6e4266143cb7af34ee04c7847d52c83e089f0d155860 WatchSource:0}: Error finding container aca46f8aba159aa3275b6e4266143cb7af34ee04c7847d52c83e089f0d155860: Status 404 returned error can't find the container with id aca46f8aba159aa3275b6e4266143cb7af34ee04c7847d52c83e089f0d155860 Oct 07 15:08:50 crc kubenswrapper[4672]: I1007 15:08:50.455341 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerStarted","Data":"dc31286ab0e81ab04ffbaeb47f1c36541ee7ddc855bef1f1c49dc4a93755da86"} Oct 07 15:08:50 crc kubenswrapper[4672]: I1007 15:08:50.455738 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:50 crc kubenswrapper[4672]: I1007 15:08:50.456563 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d9d7bb546-5l2xz" event={"ID":"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4","Type":"ContainerStarted","Data":"aca46f8aba159aa3275b6e4266143cb7af34ee04c7847d52c83e089f0d155860"} Oct 07 15:08:50 crc kubenswrapper[4672]: I1007 15:08:50.473506 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-657f8cc4c6-kbnsj" podStartSLOduration=5.47349007 podStartE2EDuration="5.47349007s" podCreationTimestamp="2025-10-07 15:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:50.472298976 +0000 UTC m=+1207.447477557" watchObservedRunningTime="2025-10-07 15:08:50.47349007 +0000 UTC m=+1207.448668651" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.227129 4672 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.273632 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300623 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300680 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300725 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300809 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300893 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300931 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd59v\" (UniqueName: \"kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.300960 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts\") pod \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\" (UID: \"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41\") " Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.301815 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.302118 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.309261 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v" (OuterVolumeSpecName: "kube-api-access-rd59v") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "kube-api-access-rd59v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.336182 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts" (OuterVolumeSpecName: "scripts") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.338264 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.382154 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403228 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd59v\" (UniqueName: \"kubernetes.io/projected/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-kube-api-access-rd59v\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403272 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403289 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403303 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403315 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.403327 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.408324 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data" (OuterVolumeSpecName: "config-data") pod "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" (UID: "7f15bf66-1b2a-4084-8e99-33ed3d5e8f41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.485300 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5858475b48-6sdfp" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.486256 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78768fbc98-wqtlt" event={"ID":"8ee6c49e-c514-4798-ab81-0bc31c59048d","Type":"ContainerStarted","Data":"32675e1a4a724dff6b8375143462cf8cc2434989d195c3c85a591f280eac99d5"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.486305 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.486403 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.489623 4672 generic.go:334] "Generic (PLEG): container finished" podID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerID="f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44" exitCode=0 Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.489713 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerDied","Data":"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.489740 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f15bf66-1b2a-4084-8e99-33ed3d5e8f41","Type":"ContainerDied","Data":"52d245b453b540116553eae84be7ce060fada39be100f48663ed5c8e0b562562"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.489742 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.489812 4672 scope.go:117] "RemoveContainer" containerID="5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.496939 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" event={"ID":"dd7a9076-c512-4e9c-b626-482122ee920b","Type":"ContainerStarted","Data":"aee814878ec7ccebb5c18d080ea1dafb35f25c3d0f14b359c6da1027d67d84ac"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.496998 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" event={"ID":"dd7a9076-c512-4e9c-b626-482122ee920b","Type":"ContainerStarted","Data":"bdb6e3ec271a698ed92177f06ca4c0e56d92b183d727ef49222512c259b62e84"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.506036 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.508190 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" event={"ID":"b0ca4fb8-e017-4010-ac17-07820394dc2b","Type":"ContainerStarted","Data":"814eb47cf8bec802f5297c0630ada9a149479dd0462ad9c73edb5b7a394b770a"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.508386 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.515537 4672 generic.go:334] "Generic (PLEG): container finished" podID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerID="f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc" exitCode=0 Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.515587 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerDied","Data":"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.517368 4672 scope.go:117] "RemoveContainer" containerID="01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.518243 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d9d7bb546-5l2xz" event={"ID":"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4","Type":"ContainerStarted","Data":"a1ced2aaef8d41f70b329011a51a82ced4212d86ae68c27e0319ad7129ebacf1"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.518290 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d9d7bb546-5l2xz" event={"ID":"a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4","Type":"ContainerStarted","Data":"4bec417000bf52d75a9a1665277f5fc19bd9cc0c4270d6290b281bb768451eaf"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.518885 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.518951 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.526359 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-keystone-listener-f4cdcdbb-4hkrg" podStartSLOduration=4.8863082349999996 podStartE2EDuration="6.526340879s" podCreationTimestamp="2025-10-07 15:08:45 +0000 UTC" firstStartedPulling="2025-10-07 15:08:48.589392726 +0000 UTC m=+1205.564571307" lastFinishedPulling="2025-10-07 15:08:50.22942537 +0000 UTC m=+1207.204603951" observedRunningTime="2025-10-07 15:08:51.522999403 +0000 UTC m=+1208.498177984" watchObservedRunningTime="2025-10-07 15:08:51.526340879 +0000 UTC m=+1208.501519460" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.526811 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" event={"ID":"79138985-a31b-43fd-aec5-cfd8abf453d1","Type":"ContainerStarted","Data":"fa12b9d02f35822d1a54027989169b28cf31a9f282c5cb5f30e9fbd999e6332f"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.526864 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" event={"ID":"79138985-a31b-43fd-aec5-cfd8abf453d1","Type":"ContainerStarted","Data":"a84b4cae33ac71ed7e3f6883e33ca475ad88b57895faab76e65ee441c8ae3e5a"} Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.556146 4672 scope.go:117] "RemoveContainer" containerID="f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.570651 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-78768fbc98-wqtlt" podStartSLOduration=18.570632132 podStartE2EDuration="18.570632132s" podCreationTimestamp="2025-10-07 15:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:51.562955501 +0000 UTC m=+1208.538134092" watchObservedRunningTime="2025-10-07 15:08:51.570632132 +0000 UTC m=+1208.545810713" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.582054 4672 scope.go:117] "RemoveContainer" containerID="5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99" Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.585214 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99\": container with ID starting with 5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99 not found: ID does not exist" containerID="5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.585259 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99"} err="failed to get container status \"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99\": rpc error: code = NotFound desc = could not find container \"5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99\": container with ID starting with 5a53f4074f830f942d2e0656994ae2fe2a0861f093bb24247809271117a61d99 not found: ID does not exist" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.585290 4672 scope.go:117] "RemoveContainer" containerID="01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01" Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.585655 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01\": container with ID starting with 01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01 not found: ID does not exist" containerID="01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.585685 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01"} err="failed to get container status \"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01\": rpc error: code = NotFound desc = could not find container \"01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01\": container with ID starting with 01778c1479e3d6de25683962be0f28e47aad82455c87fb5ed74858a66a9a3d01 not found: ID does not exist" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.585710 4672 scope.go:117] "RemoveContainer" containerID="f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44" Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.585945 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44\": container with ID starting with f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44 not found: ID does not exist" containerID="f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.585973 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44"} err="failed to get container status \"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44\": rpc error: code = NotFound desc = could not find container \"f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44\": container with ID starting with f2fd8fa9263710a6dd43216d0787d3683a1d3040f7712e8ef529451a3ec86e44 not found: ID does not exist" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.681547 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.686503 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.692734 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-fbf4c46f7-rxrcl" podStartSLOduration=5.023834846 podStartE2EDuration="6.692713488s" podCreationTimestamp="2025-10-07 15:08:45 +0000 UTC" firstStartedPulling="2025-10-07 15:08:48.589344045 +0000 UTC m=+1205.564522616" lastFinishedPulling="2025-10-07 15:08:50.258222677 +0000 UTC m=+1207.233401258" observedRunningTime="2025-10-07 15:08:51.643432973 +0000 UTC m=+1208.618611554" watchObservedRunningTime="2025-10-07 15:08:51.692713488 +0000 UTC m=+1208.667892089" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.723462 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.723921 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="ceilometer-notification-agent" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.723946 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" 
containerName="ceilometer-notification-agent" Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.723961 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="sg-core" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.723969 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="sg-core" Oct 07 15:08:51 crc kubenswrapper[4672]: E1007 15:08:51.723994 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="proxy-httpd" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.724002 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="proxy-httpd" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.724253 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="ceilometer-notification-agent" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.724288 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="sg-core" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.724300 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" containerName="proxy-httpd" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.726778 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.731652 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.732009 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.732684 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.742947 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" podStartSLOduration=6.74292971 podStartE2EDuration="6.74292971s" podCreationTimestamp="2025-10-07 15:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:51.674227497 +0000 UTC m=+1208.649406098" watchObservedRunningTime="2025-10-07 15:08:51.74292971 +0000 UTC m=+1208.718108291" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.753568 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6d9d7bb546-5l2xz" podStartSLOduration=3.753552986 podStartE2EDuration="3.753552986s" podCreationTimestamp="2025-10-07 15:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:51.711438726 +0000 UTC m=+1208.686617337" watchObservedRunningTime="2025-10-07 15:08:51.753552986 +0000 UTC m=+1208.728731567" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825323 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " 
pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825364 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825385 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825552 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825608 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825761 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.825861 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pplcf\" (UniqueName: \"kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.902874 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f15bf66-1b2a-4084-8e99-33ed3d5e8f41" path="/var/lib/kubelet/pods/7f15bf66-1b2a-4084-8e99-33ed3d5e8f41/volumes" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.927832 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.927887 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.927961 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " 
pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.928112 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pplcf\" (UniqueName: \"kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.928198 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.928227 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.928250 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.929166 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.930759 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.933964 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.934093 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.935919 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: I1007 15:08:51.936463 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:51 crc kubenswrapper[4672]: 
I1007 15:08:51.947697 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pplcf\" (UniqueName: \"kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf\") pod \"ceilometer-0\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " pod="openstack/ceilometer-0" Oct 07 15:08:52 crc kubenswrapper[4672]: I1007 15:08:52.056780 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:08:52 crc kubenswrapper[4672]: I1007 15:08:52.540943 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:52 crc kubenswrapper[4672]: W1007 15:08:52.544264 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84d80210_ba4e_437a_948f_0c80099d72c5.slice/crio-3a1821737bce3c98b1e5d905dcaacc3b51c0c2180fd2488529f41333b3c718a2 WatchSource:0}: Error finding container 3a1821737bce3c98b1e5d905dcaacc3b51c0c2180fd2488529f41333b3c718a2: Status 404 returned error can't find the container with id 3a1821737bce3c98b1e5d905dcaacc3b51c0c2180fd2488529f41333b3c718a2 Oct 07 15:08:52 crc kubenswrapper[4672]: I1007 15:08:52.550424 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 07 15:08:53 crc kubenswrapper[4672]: I1007 15:08:53.546306 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerStarted","Data":"059265b19bf223cb286003f566522beb84339d457d1cc6e489855d359f59e512"} Oct 07 15:08:53 crc kubenswrapper[4672]: I1007 15:08:53.546657 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerStarted","Data":"3a1821737bce3c98b1e5d905dcaacc3b51c0c2180fd2488529f41333b3c718a2"} Oct 07 15:08:54 crc kubenswrapper[4672]: I1007 15:08:54.566783 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerStarted","Data":"458d411042ecae1f8215920a7a7960b32a2949df1ce77f871d98d161a30fe230"} Oct 07 15:08:55 crc kubenswrapper[4672]: I1007 15:08:55.576527 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerStarted","Data":"b7ce88b98127052b3aac9c436c3aa916884307ac1d1dcc4c68a1c12de434bf10"} Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.057620 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.121526 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.122257 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="dnsmasq-dns" containerID="cri-o://a0742bb6c3bc1235f49809fe17051ff39debfb3c0e4d0e89cf3af01f7da9ebf6" gracePeriod=10 Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.330770 4672 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.331994 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.336434 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-2zdnj" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.337010 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.337178 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.349850 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.525070 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.525806 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config-secret\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.527569 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz86x\" (UniqueName: \"kubernetes.io/projected/452e785c-16f5-478c-8c52-638692cd3abd-kube-api-access-pz86x\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.527624 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.596500 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerStarted","Data":"3b00be89fc6f70278e5de211e9aa69107067c70c70568c382509abb59320c95c"} Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.598333 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.615909 4672 generic.go:334] "Generic (PLEG): container finished" podID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerID="a0742bb6c3bc1235f49809fe17051ff39debfb3c0e4d0e89cf3af01f7da9ebf6" exitCode=0 Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.615955 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" event={"ID":"abc5aab9-1196-4fac-9d39-d44d60a0a6d5","Type":"ContainerDied","Data":"a0742bb6c3bc1235f49809fe17051ff39debfb3c0e4d0e89cf3af01f7da9ebf6"} Oct 07 15:08:56 crc 
kubenswrapper[4672]: I1007 15:08:56.634404 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config-secret\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.634527 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.634554 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz86x\" (UniqueName: \"kubernetes.io/projected/452e785c-16f5-478c-8c52-638692cd3abd-kube-api-access-pz86x\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.634608 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.636518 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.654041 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.654770 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/452e785c-16f5-478c-8c52-638692cd3abd-openstack-config-secret\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.662756 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz86x\" (UniqueName: \"kubernetes.io/projected/452e785c-16f5-478c-8c52-638692cd3abd-kube-api-access-pz86x\") pod \"openstackclient\" (UID: \"452e785c-16f5-478c-8c52-638692cd3abd\") " pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.712614 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.734446 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.751093 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.230038253 podStartE2EDuration="5.751075632s" podCreationTimestamp="2025-10-07 15:08:51 +0000 UTC" firstStartedPulling="2025-10-07 15:08:52.546124409 +0000 UTC m=+1209.521302980" lastFinishedPulling="2025-10-07 15:08:56.067161778 +0000 UTC m=+1213.042340359" observedRunningTime="2025-10-07 15:08:56.645204181 +0000 UTC m=+1213.620382762" watchObservedRunningTime="2025-10-07 15:08:56.751075632 +0000 UTC m=+1213.726254213" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838236 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838285 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838352 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8qth\" (UniqueName: \"kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838431 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838480 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.838546 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb\") pod \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\" (UID: \"abc5aab9-1196-4fac-9d39-d44d60a0a6d5\") " Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.870307 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth" (OuterVolumeSpecName: "kube-api-access-p8qth") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "kube-api-access-p8qth". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.910696 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.916694 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.922302 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.942876 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.942973 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.942991 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.943003 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8qth\" (UniqueName: \"kubernetes.io/projected/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-kube-api-access-p8qth\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.945178 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config" (OuterVolumeSpecName: "config") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:56 crc kubenswrapper[4672]: I1007 15:08:56.988683 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "abc5aab9-1196-4fac-9d39-d44d60a0a6d5" (UID: "abc5aab9-1196-4fac-9d39-d44d60a0a6d5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.048064 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.048095 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abc5aab9-1196-4fac-9d39-d44d60a0a6d5-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.320328 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 15:08:57 crc kubenswrapper[4672]: W1007 15:08:57.320728 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod452e785c_16f5_478c_8c52_638692cd3abd.slice/crio-43f18ed3f5201d972c45b622187cabdb57acf9846801b5d953742ee48096235a WatchSource:0}: Error finding container 43f18ed3f5201d972c45b622187cabdb57acf9846801b5d953742ee48096235a: Status 404 returned error can't find the container with id 43f18ed3f5201d972c45b622187cabdb57acf9846801b5d953742ee48096235a Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.640138 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.640472 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-794bfdbb9f-mw8qf" event={"ID":"abc5aab9-1196-4fac-9d39-d44d60a0a6d5","Type":"ContainerDied","Data":"70ff148092a7ba384122ed9604f6b437f9dd0e2983d9eabcb589685335dfefcc"} Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.640542 4672 scope.go:117] "RemoveContainer" containerID="a0742bb6c3bc1235f49809fe17051ff39debfb3c0e4d0e89cf3af01f7da9ebf6" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.648124 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"452e785c-16f5-478c-8c52-638692cd3abd","Type":"ContainerStarted","Data":"43f18ed3f5201d972c45b622187cabdb57acf9846801b5d953742ee48096235a"} Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.672830 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.696704 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.709255 4672 scope.go:117] "RemoveContainer" containerID="077be3ff27cc1902958dfaad595be5a0207e0b1a43ceb70620b89cfc79b02133" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.711232 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-794bfdbb9f-mw8qf"] Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.918609 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" path="/var/lib/kubelet/pods/abc5aab9-1196-4fac-9d39-d44d60a0a6d5/volumes" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.993981 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6d96d8478f-sx526"] Oct 07 15:08:57 crc kubenswrapper[4672]: E1007 15:08:57.994940 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="dnsmasq-dns" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 
15:08:57.994958 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="dnsmasq-dns" Oct 07 15:08:57 crc kubenswrapper[4672]: E1007 15:08:57.994998 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="init" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.995005 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="init" Oct 07 15:08:57 crc kubenswrapper[4672]: I1007 15:08:57.995410 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="abc5aab9-1196-4fac-9d39-d44d60a0a6d5" containerName="dnsmasq-dns" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.012004 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.018606 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.028917 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d96d8478f-sx526"] Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.029208 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.045613 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.057181 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092389 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-etc-swift\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092466 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-internal-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092487 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-config-data\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092531 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-public-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092571 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-r2zl9\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-kube-api-access-r2zl9\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092609 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-run-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092646 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-combined-ca-bundle\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.092760 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-log-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195285 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-etc-swift\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195406 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-internal-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195460 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-config-data\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195535 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-public-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195581 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2zl9\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-kube-api-access-r2zl9\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195631 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-run-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195671 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-combined-ca-bundle\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.195808 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-log-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.196951 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-log-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.197049 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7406b25-bb39-409b-bde4-75cc32bf4ae2-run-httpd\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.203068 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-combined-ca-bundle\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.204422 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-internal-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.215453 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-public-tls-certs\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.218929 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7406b25-bb39-409b-bde4-75cc32bf4ae2-config-data\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.220686 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-etc-swift\") pod 
\"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.224888 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2zl9\" (UniqueName: \"kubernetes.io/projected/e7406b25-bb39-409b-bde4-75cc32bf4ae2-kube-api-access-r2zl9\") pod \"swift-proxy-6d96d8478f-sx526\" (UID: \"e7406b25-bb39-409b-bde4-75cc32bf4ae2\") " pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.398185 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.687966 4672 generic.go:334] "Generic (PLEG): container finished" podID="62307f94-c8e8-4781-a675-0951f6e1d797" containerID="e65a67703af4d8814c91d9faab26e202d32eb34eaac035fb55fbafa0fb73452f" exitCode=0 Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.688135 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kqgxx" event={"ID":"62307f94-c8e8-4781-a675-0951f6e1d797","Type":"ContainerDied","Data":"e65a67703af4d8814c91d9faab26e202d32eb34eaac035fb55fbafa0fb73452f"} Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.874098 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:08:58 crc kubenswrapper[4672]: I1007 15:08:58.962474 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d96d8478f-sx526"] Oct 07 15:08:58 crc kubenswrapper[4672]: W1007 15:08:58.984341 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7406b25_bb39_409b_bde4_75cc32bf4ae2.slice/crio-69c6de3f13e64dcb76e05891df67c361237057cf8b7ff8ecf90470e92fa6364d WatchSource:0}: Error finding container 69c6de3f13e64dcb76e05891df67c361237057cf8b7ff8ecf90470e92fa6364d: Status 404 returned error can't find the container with id 69c6de3f13e64dcb76e05891df67c361237057cf8b7ff8ecf90470e92fa6364d Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.711558 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-central-agent" containerID="cri-o://059265b19bf223cb286003f566522beb84339d457d1cc6e489855d359f59e512" gracePeriod=30 Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712037 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="proxy-httpd" containerID="cri-o://3b00be89fc6f70278e5de211e9aa69107067c70c70568c382509abb59320c95c" gracePeriod=30 Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712121 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d96d8478f-sx526" event={"ID":"e7406b25-bb39-409b-bde4-75cc32bf4ae2","Type":"ContainerStarted","Data":"d90e1f7353890e4c8890c9307122d536084a44e57c803cbc3b02d19bf55ff2d2"} Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712165 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="sg-core" containerID="cri-o://b7ce88b98127052b3aac9c436c3aa916884307ac1d1dcc4c68a1c12de434bf10" gracePeriod=30 Oct 07 15:08:59 crc kubenswrapper[4672]: 
I1007 15:08:59.712169 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d96d8478f-sx526" event={"ID":"e7406b25-bb39-409b-bde4-75cc32bf4ae2","Type":"ContainerStarted","Data":"74e3240303e35caef3b2cd34378603643b48d60ed1cbdc69b5d79bce21c14c66"} Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712311 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d96d8478f-sx526" event={"ID":"e7406b25-bb39-409b-bde4-75cc32bf4ae2","Type":"ContainerStarted","Data":"69c6de3f13e64dcb76e05891df67c361237057cf8b7ff8ecf90470e92fa6364d"} Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712126 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-notification-agent" containerID="cri-o://458d411042ecae1f8215920a7a7960b32a2949df1ce77f871d98d161a30fe230" gracePeriod=30 Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712473 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.712543 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:08:59 crc kubenswrapper[4672]: I1007 15:08:59.925395 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6d96d8478f-sx526" podStartSLOduration=2.925378142 podStartE2EDuration="2.925378142s" podCreationTimestamp="2025-10-07 15:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:08:59.737489326 +0000 UTC m=+1216.712667907" watchObservedRunningTime="2025-10-07 15:08:59.925378142 +0000 UTC m=+1216.900556723" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.165281 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.352118 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxlf4\" (UniqueName: \"kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4\") pod \"62307f94-c8e8-4781-a675-0951f6e1d797\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.352293 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle\") pod \"62307f94-c8e8-4781-a675-0951f6e1d797\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.352325 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config\") pod \"62307f94-c8e8-4781-a675-0951f6e1d797\" (UID: \"62307f94-c8e8-4781-a675-0951f6e1d797\") " Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.361362 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4" (OuterVolumeSpecName: "kube-api-access-xxlf4") pod "62307f94-c8e8-4781-a675-0951f6e1d797" (UID: "62307f94-c8e8-4781-a675-0951f6e1d797"). InnerVolumeSpecName "kube-api-access-xxlf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.411200 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config" (OuterVolumeSpecName: "config") pod "62307f94-c8e8-4781-a675-0951f6e1d797" (UID: "62307f94-c8e8-4781-a675-0951f6e1d797"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.416826 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62307f94-c8e8-4781-a675-0951f6e1d797" (UID: "62307f94-c8e8-4781-a675-0951f6e1d797"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.455552 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.455602 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/62307f94-c8e8-4781-a675-0951f6e1d797-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.455616 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxlf4\" (UniqueName: \"kubernetes.io/projected/62307f94-c8e8-4781-a675-0951f6e1d797-kube-api-access-xxlf4\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773240 4672 generic.go:334] "Generic (PLEG): container finished" podID="84d80210-ba4e-437a-948f-0c80099d72c5" containerID="3b00be89fc6f70278e5de211e9aa69107067c70c70568c382509abb59320c95c" exitCode=0 Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773670 4672 generic.go:334] "Generic (PLEG): container finished" podID="84d80210-ba4e-437a-948f-0c80099d72c5" containerID="b7ce88b98127052b3aac9c436c3aa916884307ac1d1dcc4c68a1c12de434bf10" exitCode=2 Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773686 4672 generic.go:334] "Generic (PLEG): container finished" podID="84d80210-ba4e-437a-948f-0c80099d72c5" containerID="458d411042ecae1f8215920a7a7960b32a2949df1ce77f871d98d161a30fe230" exitCode=0 Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773695 4672 generic.go:334] "Generic (PLEG): container finished" podID="84d80210-ba4e-437a-948f-0c80099d72c5" containerID="059265b19bf223cb286003f566522beb84339d457d1cc6e489855d359f59e512" exitCode=0 Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773797 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerDied","Data":"3b00be89fc6f70278e5de211e9aa69107067c70c70568c382509abb59320c95c"} Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773829 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerDied","Data":"b7ce88b98127052b3aac9c436c3aa916884307ac1d1dcc4c68a1c12de434bf10"} Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773841 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerDied","Data":"458d411042ecae1f8215920a7a7960b32a2949df1ce77f871d98d161a30fe230"} Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.773862 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerDied","Data":"059265b19bf223cb286003f566522beb84339d457d1cc6e489855d359f59e512"} Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.803971 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kqgxx" event={"ID":"62307f94-c8e8-4781-a675-0951f6e1d797","Type":"ContainerDied","Data":"1a4e5673d6583ad7186236bcbf7333a925a476d5324cd4f5658e8d2c1e5d8fbb"} Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.804063 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a4e5673d6583ad7186236bcbf7333a925a476d5324cd4f5658e8d2c1e5d8fbb" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.804179 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kqgxx" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.991679 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:00 crc kubenswrapper[4672]: E1007 15:09:00.992095 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62307f94-c8e8-4781-a675-0951f6e1d797" containerName="neutron-db-sync" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.992114 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="62307f94-c8e8-4781-a675-0951f6e1d797" containerName="neutron-db-sync" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.992290 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="62307f94-c8e8-4781-a675-0951f6e1d797" containerName="neutron-db-sync" Oct 07 15:09:00 crc kubenswrapper[4672]: I1007 15:09:00.993254 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.018133 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.059276 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085068 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085122 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085145 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085252 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk6sg\" (UniqueName: \"kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085274 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.085310 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114255 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:01 crc kubenswrapper[4672]: E1007 15:09:01.114674 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-notification-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114687 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-notification-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: E1007 15:09:01.114705 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-central-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114711 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-central-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: E1007 
15:09:01.114739 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="proxy-httpd" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114746 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="proxy-httpd" Oct 07 15:09:01 crc kubenswrapper[4672]: E1007 15:09:01.114760 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="sg-core" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114766 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="sg-core" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114932 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-central-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114945 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="ceilometer-notification-agent" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114967 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="proxy-httpd" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.114978 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" containerName="sg-core" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.115886 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.119735 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.119786 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.119957 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dr29t" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.120139 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.146542 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.187705 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188081 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pplcf\" (UniqueName: \"kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188112 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data\") pod 
\"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188156 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188212 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188359 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188375 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts\") pod \"84d80210-ba4e-437a-948f-0c80099d72c5\" (UID: \"84d80210-ba4e-437a-948f-0c80099d72c5\") " Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188596 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk6sg\" (UniqueName: \"kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188620 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188647 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188759 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188778 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.188795 4672 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.189657 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.199830 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf" (OuterVolumeSpecName: "kube-api-access-pplcf") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "kube-api-access-pplcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.200801 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.202493 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts" (OuterVolumeSpecName: "scripts") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.203321 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.204263 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.208175 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.208379 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.210234 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.240216 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk6sg\" (UniqueName: \"kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg\") pod \"dnsmasq-dns-6cf7bc7f99-xbd4f\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293369 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293466 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293490 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293527 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 
07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293587 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgqk8\" (UniqueName: \"kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293692 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293705 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84d80210-ba4e-437a-948f-0c80099d72c5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293716 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.293729 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pplcf\" (UniqueName: \"kubernetes.io/projected/84d80210-ba4e-437a-948f-0c80099d72c5-kube-api-access-pplcf\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.331702 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.396772 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.401251 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406011 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406350 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406367 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406392 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406436 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgqk8\" (UniqueName: \"kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406488 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.406500 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.411859 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.421708 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.426889 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") 
" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.429500 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.438159 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgqk8\" (UniqueName: \"kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8\") pod \"neutron-544fc9bf6b-wfkjb\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.477169 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data" (OuterVolumeSpecName: "config-data") pod "84d80210-ba4e-437a-948f-0c80099d72c5" (UID: "84d80210-ba4e-437a-948f-0c80099d72c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.491198 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.509377 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d80210-ba4e-437a-948f-0c80099d72c5-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.638746 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.850885 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84d80210-ba4e-437a-948f-0c80099d72c5","Type":"ContainerDied","Data":"3a1821737bce3c98b1e5d905dcaacc3b51c0c2180fd2488529f41333b3c718a2"} Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.851254 4672 scope.go:117] "RemoveContainer" containerID="3b00be89fc6f70278e5de211e9aa69107067c70c70568c382509abb59320c95c" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.851402 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.863039 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2j8d8" event={"ID":"1d702410-d569-45e2-9f92-b8e0c7a0fd9d","Type":"ContainerStarted","Data":"db12a9de8539ba1a88463b35d19faa9c4c6aa8d220bae12a7f2cee228bdfbadc"} Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.941350 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-2j8d8" podStartSLOduration=8.752713074 podStartE2EDuration="42.941331174s" podCreationTimestamp="2025-10-07 15:08:19 +0000 UTC" firstStartedPulling="2025-10-07 15:08:25.782903898 +0000 UTC m=+1182.758082479" lastFinishedPulling="2025-10-07 15:08:59.971522008 +0000 UTC m=+1216.946700579" observedRunningTime="2025-10-07 15:09:01.897281739 +0000 UTC m=+1218.872460320" watchObservedRunningTime="2025-10-07 15:09:01.941331174 +0000 UTC m=+1218.916509755" Oct 07 15:09:01 crc kubenswrapper[4672]: I1007 15:09:01.964192 4672 scope.go:117] "RemoveContainer" containerID="b7ce88b98127052b3aac9c436c3aa916884307ac1d1dcc4c68a1c12de434bf10" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.011342 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.020710 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.027089 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.029850 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.030988 4672 scope.go:117] "RemoveContainer" containerID="458d411042ecae1f8215920a7a7960b32a2949df1ce77f871d98d161a30fe230" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.034941 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.035361 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.035478 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140188 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140294 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140362 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc 
kubenswrapper[4672]: I1007 15:09:02.140389 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd949\" (UniqueName: \"kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140414 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140456 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.140483 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242415 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242469 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242604 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242690 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242765 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242794 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd949\" (UniqueName: \"kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949\") pod \"ceilometer-0\" (UID: 
\"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.242882 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.244452 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.246790 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d9d7bb546-5l2xz" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.252807 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.254828 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.255361 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.270628 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd949\" (UniqueName: \"kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.272914 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.308785 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.326256 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.326652 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-657f8cc4c6-kbnsj" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api-log" containerID="cri-o://f35c2319d79cee433b52a65e71334c1dcba7f4dddfae23a406d73563027b24ff" gracePeriod=30 Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.327108 4672 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-api-657f8cc4c6-kbnsj" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" containerID="cri-o://dc31286ab0e81ab04ffbaeb47f1c36541ee7ddc855bef1f1c49dc4a93755da86" gracePeriod=30 Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.342578 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd\") pod \"ceilometer-0\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.377152 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-657f8cc4c6-kbnsj" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": EOF" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.378456 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.406260 4672 scope.go:117] "RemoveContainer" containerID="059265b19bf223cb286003f566522beb84339d457d1cc6e489855d359f59e512" Oct 07 15:09:02 crc kubenswrapper[4672]: W1007 15:09:02.439246 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf80774c7_30a9_4805_a540_489ff4f886ba.slice/crio-63c3504c6819224b196ee3bb830e6e18a1808945b03a93cfb85f82be52b37ace WatchSource:0}: Error finding container 63c3504c6819224b196ee3bb830e6e18a1808945b03a93cfb85f82be52b37ace: Status 404 returned error can't find the container with id 63c3504c6819224b196ee3bb830e6e18a1808945b03a93cfb85f82be52b37ace Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.488456 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.553185 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.919063 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerStarted","Data":"21515c4b1387ae95a73b5adda9417683b5d6da71adbf60b9bf4abb248dd9ab3e"} Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.964435 4672 generic.go:334] "Generic (PLEG): container finished" podID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerID="f35c2319d79cee433b52a65e71334c1dcba7f4dddfae23a406d73563027b24ff" exitCode=143 Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.964525 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerDied","Data":"f35c2319d79cee433b52a65e71334c1dcba7f4dddfae23a406d73563027b24ff"} Oct 07 15:09:02 crc kubenswrapper[4672]: I1007 15:09:02.973417 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" event={"ID":"f80774c7-30a9-4805-a540-489ff4f886ba","Type":"ContainerStarted","Data":"63c3504c6819224b196ee3bb830e6e18a1808945b03a93cfb85f82be52b37ace"} Oct 07 15:09:03 crc 
kubenswrapper[4672]: I1007 15:09:03.081945 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:03 crc kubenswrapper[4672]: I1007 15:09:03.916162 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84d80210-ba4e-437a-948f-0c80099d72c5" path="/var/lib/kubelet/pods/84d80210-ba4e-437a-948f-0c80099d72c5/volumes" Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.007784 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerStarted","Data":"1894479f002df42e43f5b0e8adee0de7c1757ea5446835ee3137a6c3a96f62e7"} Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.007832 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerStarted","Data":"5a702e8a30c015fddceb118b2c7dba9af0ed3ff6ca48e53f2afd284434aa4316"} Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.009085 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.035321 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerStarted","Data":"e8e83ec7e6bcd16e57e8085610158006a2e5d2f370956ac858cf5eafd986478c"} Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.035383 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerStarted","Data":"6bce14d1b45c2e29447d782e9b8f4f126d95d7a1bdbae8183030e804967fa765"} Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.048464 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-544fc9bf6b-wfkjb" podStartSLOduration=3.048444653 podStartE2EDuration="3.048444653s" podCreationTimestamp="2025-10-07 15:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:04.039170797 +0000 UTC m=+1221.014349378" watchObservedRunningTime="2025-10-07 15:09:04.048444653 +0000 UTC m=+1221.023623234" Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.058113 4672 generic.go:334] "Generic (PLEG): container finished" podID="f80774c7-30a9-4805-a540-489ff4f886ba" containerID="7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c" exitCode=0 Oct 07 15:09:04 crc kubenswrapper[4672]: I1007 15:09:04.061273 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" event={"ID":"f80774c7-30a9-4805-a540-489ff4f886ba","Type":"ContainerDied","Data":"7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c"} Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.020453 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6c7bf59845-qwdkz"] Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.022662 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.025721 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.025965 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.043594 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c7bf59845-qwdkz"] Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.133304 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" event={"ID":"f80774c7-30a9-4805-a540-489ff4f886ba","Type":"ContainerStarted","Data":"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248"} Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.134742 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.147670 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerStarted","Data":"2dd39d87e163c09a4e30a8cf7c1ed02c500610c5eb379a1a0af591aef4da0ef6"} Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.161735 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" podStartSLOduration=5.161710238 podStartE2EDuration="5.161710238s" podCreationTimestamp="2025-10-07 15:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:05.15826852 +0000 UTC m=+1222.133447101" watchObservedRunningTime="2025-10-07 15:09:05.161710238 +0000 UTC m=+1222.136888819" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216459 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-public-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216578 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-httpd-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216613 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216627 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-internal-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 
15:09:05.216672 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgtp5\" (UniqueName: \"kubernetes.io/projected/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-kube-api-access-bgtp5\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216737 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-ovndb-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.216829 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-combined-ca-bundle\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318727 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-httpd-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318810 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318830 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-internal-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318871 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgtp5\" (UniqueName: \"kubernetes.io/projected/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-kube-api-access-bgtp5\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318896 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-ovndb-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.318974 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-combined-ca-bundle\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.329205 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-public-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.338510 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-public-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.345286 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-combined-ca-bundle\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.368511 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-httpd-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.377624 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-internal-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.379222 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-config\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.395580 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-ovndb-tls-certs\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.396835 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgtp5\" (UniqueName: \"kubernetes.io/projected/fdb59d6a-51ea-4daa-904b-54a9a1af23f7-kube-api-access-bgtp5\") pod \"neutron-6c7bf59845-qwdkz\" (UID: \"fdb59d6a-51ea-4daa-904b-54a9a1af23f7\") " pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.647683 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:05 crc kubenswrapper[4672]: I1007 15:09:05.850146 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:09:06 crc kubenswrapper[4672]: I1007 15:09:06.153782 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-78768fbc98-wqtlt" Oct 07 15:09:06 crc kubenswrapper[4672]: I1007 15:09:06.163749 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerStarted","Data":"3ad7b49ed11249164dced50d245e5e6070105dabb2cde377ae8e2f4b30b17b8b"} Oct 07 15:09:06 crc kubenswrapper[4672]: I1007 15:09:06.355139 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c7bf59845-qwdkz"] Oct 07 15:09:06 crc kubenswrapper[4672]: W1007 15:09:06.372763 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb59d6a_51ea_4daa_904b_54a9a1af23f7.slice/crio-4ccddbdf3a54d526023538cf984c4e1e8b6cf58185e0d0a30ad4654b21516bd8 WatchSource:0}: Error finding container 4ccddbdf3a54d526023538cf984c4e1e8b6cf58185e0d0a30ad4654b21516bd8: Status 404 returned error can't find the container with id 4ccddbdf3a54d526023538cf984c4e1e8b6cf58185e0d0a30ad4654b21516bd8 Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.177403 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c7bf59845-qwdkz" event={"ID":"fdb59d6a-51ea-4daa-904b-54a9a1af23f7","Type":"ContainerStarted","Data":"bc77c81f7c576db0fb60ae5f123d63695290e6e1503487c831579f885b074976"} Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.178245 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.178270 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c7bf59845-qwdkz" event={"ID":"fdb59d6a-51ea-4daa-904b-54a9a1af23f7","Type":"ContainerStarted","Data":"8266556b3ca19c6c3a40769f36ab16665c06202e3ef24613b89fceef3b256f3b"} Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.178286 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c7bf59845-qwdkz" event={"ID":"fdb59d6a-51ea-4daa-904b-54a9a1af23f7","Type":"ContainerStarted","Data":"4ccddbdf3a54d526023538cf984c4e1e8b6cf58185e0d0a30ad4654b21516bd8"} Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.187415 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerStarted","Data":"0c793d7d513ccaff013d0b18fc87dc6d5727de33bd1d9b4e43d4d959b59b187a"} Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.200378 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6c7bf59845-qwdkz" podStartSLOduration=3.200359851 podStartE2EDuration="3.200359851s" podCreationTimestamp="2025-10-07 15:09:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:07.196311615 +0000 UTC m=+1224.171490216" watchObservedRunningTime="2025-10-07 15:09:07.200359851 +0000 UTC m=+1224.175538432" Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.219675 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ceilometer-0" podStartSLOduration=2.570127575 podStartE2EDuration="6.219651525s" podCreationTimestamp="2025-10-07 15:09:01 +0000 UTC" firstStartedPulling="2025-10-07 15:09:03.112397238 +0000 UTC m=+1220.087575819" lastFinishedPulling="2025-10-07 15:09:06.761921188 +0000 UTC m=+1223.737099769" observedRunningTime="2025-10-07 15:09:07.219544772 +0000 UTC m=+1224.194723383" watchObservedRunningTime="2025-10-07 15:09:07.219651525 +0000 UTC m=+1224.194830126" Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.852056 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.874607 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-657f8cc4c6-kbnsj" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:37060->10.217.0.155:9311: read: connection reset by peer" Oct 07 15:09:07 crc kubenswrapper[4672]: I1007 15:09:07.874604 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-657f8cc4c6-kbnsj" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:37070->10.217.0.155:9311: read: connection reset by peer" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.220150 4672 generic.go:334] "Generic (PLEG): container finished" podID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerID="dc31286ab0e81ab04ffbaeb47f1c36541ee7ddc855bef1f1c49dc4a93755da86" exitCode=0 Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.220663 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerDied","Data":"dc31286ab0e81ab04ffbaeb47f1c36541ee7ddc855bef1f1c49dc4a93755da86"} Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.224280 4672 generic.go:334] "Generic (PLEG): container finished" podID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" containerID="db12a9de8539ba1a88463b35d19faa9c4c6aa8d220bae12a7f2cee228bdfbadc" exitCode=0 Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.225261 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2j8d8" event={"ID":"1d702410-d569-45e2-9f92-b8e0c7a0fd9d","Type":"ContainerDied","Data":"db12a9de8539ba1a88463b35d19faa9c4c6aa8d220bae12a7f2cee228bdfbadc"} Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.225308 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.404720 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.408101 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d96d8478f-sx526" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.425562 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.503194 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle\") pod \"797ac265-2c36-43ef-9e70-862fde9bacc5\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.503326 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr9mg\" (UniqueName: \"kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg\") pod \"797ac265-2c36-43ef-9e70-862fde9bacc5\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.503350 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom\") pod \"797ac265-2c36-43ef-9e70-862fde9bacc5\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.503403 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data\") pod \"797ac265-2c36-43ef-9e70-862fde9bacc5\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.503496 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs\") pod \"797ac265-2c36-43ef-9e70-862fde9bacc5\" (UID: \"797ac265-2c36-43ef-9e70-862fde9bacc5\") " Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.508170 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs" (OuterVolumeSpecName: "logs") pod "797ac265-2c36-43ef-9e70-862fde9bacc5" (UID: "797ac265-2c36-43ef-9e70-862fde9bacc5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.531373 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "797ac265-2c36-43ef-9e70-862fde9bacc5" (UID: "797ac265-2c36-43ef-9e70-862fde9bacc5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.535688 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg" (OuterVolumeSpecName: "kube-api-access-tr9mg") pod "797ac265-2c36-43ef-9e70-862fde9bacc5" (UID: "797ac265-2c36-43ef-9e70-862fde9bacc5"). InnerVolumeSpecName "kube-api-access-tr9mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.558192 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "797ac265-2c36-43ef-9e70-862fde9bacc5" (UID: "797ac265-2c36-43ef-9e70-862fde9bacc5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.607797 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797ac265-2c36-43ef-9e70-862fde9bacc5-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.607826 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.607837 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr9mg\" (UniqueName: \"kubernetes.io/projected/797ac265-2c36-43ef-9e70-862fde9bacc5-kube-api-access-tr9mg\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.607848 4672 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.629944 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data" (OuterVolumeSpecName: "config-data") pod "797ac265-2c36-43ef-9e70-862fde9bacc5" (UID: "797ac265-2c36-43ef-9e70-862fde9bacc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:08 crc kubenswrapper[4672]: I1007 15:09:08.709989 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797ac265-2c36-43ef-9e70-862fde9bacc5-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.234257 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-657f8cc4c6-kbnsj" event={"ID":"797ac265-2c36-43ef-9e70-862fde9bacc5","Type":"ContainerDied","Data":"a32c70228295c3f680b0027a8ff65803aca624a17d1df38c5c8cb782afd90d81"} Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.234323 4672 scope.go:117] "RemoveContainer" containerID="dc31286ab0e81ab04ffbaeb47f1c36541ee7ddc855bef1f1c49dc4a93755da86" Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.234345 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-657f8cc4c6-kbnsj" Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.234719 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-central-agent" containerID="cri-o://e8e83ec7e6bcd16e57e8085610158006a2e5d2f370956ac858cf5eafd986478c" gracePeriod=30 Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.235117 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="proxy-httpd" containerID="cri-o://0c793d7d513ccaff013d0b18fc87dc6d5727de33bd1d9b4e43d4d959b59b187a" gracePeriod=30 Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.235168 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="sg-core" containerID="cri-o://3ad7b49ed11249164dced50d245e5e6070105dabb2cde377ae8e2f4b30b17b8b" gracePeriod=30 Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.235203 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-notification-agent" containerID="cri-o://2dd39d87e163c09a4e30a8cf7c1ed02c500610c5eb379a1a0af591aef4da0ef6" gracePeriod=30 Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.280179 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.290927 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-657f8cc4c6-kbnsj"] Oct 07 15:09:09 crc kubenswrapper[4672]: I1007 15:09:09.905874 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" path="/var/lib/kubelet/pods/797ac265-2c36-43ef-9e70-862fde9bacc5/volumes" Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.248910 4672 generic.go:334] "Generic (PLEG): container finished" podID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerID="0c793d7d513ccaff013d0b18fc87dc6d5727de33bd1d9b4e43d4d959b59b187a" exitCode=0 Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.248953 4672 generic.go:334] "Generic (PLEG): container finished" podID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerID="3ad7b49ed11249164dced50d245e5e6070105dabb2cde377ae8e2f4b30b17b8b" exitCode=2 Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.248964 4672 generic.go:334] "Generic (PLEG): container finished" podID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerID="2dd39d87e163c09a4e30a8cf7c1ed02c500610c5eb379a1a0af591aef4da0ef6" exitCode=0 Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.248996 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerDied","Data":"0c793d7d513ccaff013d0b18fc87dc6d5727de33bd1d9b4e43d4d959b59b187a"} Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.249045 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerDied","Data":"3ad7b49ed11249164dced50d245e5e6070105dabb2cde377ae8e2f4b30b17b8b"} Oct 07 15:09:10 crc kubenswrapper[4672]: I1007 15:09:10.249060 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerDied","Data":"2dd39d87e163c09a4e30a8cf7c1ed02c500610c5eb379a1a0af591aef4da0ef6"} Oct 07 15:09:11 crc kubenswrapper[4672]: I1007 15:09:11.399183 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:11 crc kubenswrapper[4672]: I1007 15:09:11.459001 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:09:11 crc kubenswrapper[4672]: I1007 15:09:11.459362 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="dnsmasq-dns" containerID="cri-o://814eb47cf8bec802f5297c0630ada9a149479dd0462ad9c73edb5b7a394b770a" gracePeriod=10 Oct 07 15:09:12 crc kubenswrapper[4672]: I1007 15:09:12.271967 4672 generic.go:334] "Generic (PLEG): container finished" podID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerID="814eb47cf8bec802f5297c0630ada9a149479dd0462ad9c73edb5b7a394b770a" exitCode=0 Oct 07 15:09:12 crc kubenswrapper[4672]: I1007 15:09:12.272068 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" event={"ID":"b0ca4fb8-e017-4010-ac17-07820394dc2b","Type":"ContainerDied","Data":"814eb47cf8bec802f5297c0630ada9a149479dd0462ad9c73edb5b7a394b770a"} Oct 07 15:09:12 crc kubenswrapper[4672]: I1007 15:09:12.551157 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-bbdf8cc6b-btjlc" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 07 15:09:12 crc kubenswrapper[4672]: I1007 15:09:12.551330 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:09:14 crc kubenswrapper[4672]: I1007 15:09:14.303680 4672 generic.go:334] "Generic (PLEG): container finished" podID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerID="e8e83ec7e6bcd16e57e8085610158006a2e5d2f370956ac858cf5eafd986478c" exitCode=0 Oct 07 15:09:14 crc kubenswrapper[4672]: I1007 15:09:14.303872 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerDied","Data":"e8e83ec7e6bcd16e57e8085610158006a2e5d2f370956ac858cf5eafd986478c"} Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.216888 4672 scope.go:117] "RemoveContainer" containerID="f35c2319d79cee433b52a65e71334c1dcba7f4dddfae23a406d73563027b24ff" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.270485 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.322593 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2j8d8" event={"ID":"1d702410-d569-45e2-9f92-b8e0c7a0fd9d","Type":"ContainerDied","Data":"317c2370a8392259342bee7c8d70e2ff0c3fc81cc68632f544cbe4a1a02b76f0"} Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.322935 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="317c2370a8392259342bee7c8d70e2ff0c3fc81cc68632f544cbe4a1a02b76f0" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.322989 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2j8d8" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.335664 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.335725 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b268t\" (UniqueName: \"kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.335801 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.335916 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.335941 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.336110 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts\") pod \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\" (UID: \"1d702410-d569-45e2-9f92-b8e0c7a0fd9d\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.340916 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.340998 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.341178 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t" (OuterVolumeSpecName: "kube-api-access-b268t") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "kube-api-access-b268t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.343171 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts" (OuterVolumeSpecName: "scripts") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.371336 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.415792 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data" (OuterVolumeSpecName: "config-data") pod "1d702410-d569-45e2-9f92-b8e0c7a0fd9d" (UID: "1d702410-d569-45e2-9f92-b8e0c7a0fd9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439935 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439961 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439971 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b268t\" (UniqueName: \"kubernetes.io/projected/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-kube-api-access-b268t\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439979 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439987 4672 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.439995 4672 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1d702410-d569-45e2-9f92-b8e0c7a0fd9d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.458733 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.540789 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.540922 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74rcs\" (UniqueName: \"kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.540980 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.541120 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.541144 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.541174 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb\") pod \"b0ca4fb8-e017-4010-ac17-07820394dc2b\" (UID: \"b0ca4fb8-e017-4010-ac17-07820394dc2b\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.545594 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs" (OuterVolumeSpecName: "kube-api-access-74rcs") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "kube-api-access-74rcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.578933 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.591322 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.598255 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.602395 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.603679 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.609037 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config" (OuterVolumeSpecName: "config") pod "b0ca4fb8-e017-4010-ac17-07820394dc2b" (UID: "b0ca4fb8-e017-4010-ac17-07820394dc2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642382 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642447 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642570 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642611 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd949\" (UniqueName: \"kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642635 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc 
kubenswrapper[4672]: I1007 15:09:15.642652 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.642697 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd\") pod \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\" (UID: \"38ceb57e-4f73-4188-9325-bc1d0a8f24fa\") " Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643073 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643094 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643107 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643116 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74rcs\" (UniqueName: \"kubernetes.io/projected/b0ca4fb8-e017-4010-ac17-07820394dc2b-kube-api-access-74rcs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643125 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643134 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643141 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0ca4fb8-e017-4010-ac17-07820394dc2b-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.643970 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.646759 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts" (OuterVolumeSpecName: "scripts") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.649237 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949" (OuterVolumeSpecName: "kube-api-access-fd949") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "kube-api-access-fd949". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.672471 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.715588 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.739056 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data" (OuterVolumeSpecName: "config-data") pod "38ceb57e-4f73-4188-9325-bc1d0a8f24fa" (UID: "38ceb57e-4f73-4188-9325-bc1d0a8f24fa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745421 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745455 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd949\" (UniqueName: \"kubernetes.io/projected/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-kube-api-access-fd949\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745472 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745486 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745495 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745505 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:15 crc kubenswrapper[4672]: I1007 15:09:15.745514 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ceb57e-4f73-4188-9325-bc1d0a8f24fa-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.332447 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"452e785c-16f5-478c-8c52-638692cd3abd","Type":"ContainerStarted","Data":"fa046a3ab067dba20334c96d861ff26c9bc763d3285adbc312adef0c5b6ee030"} Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.335678 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.335672 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dc7f747-w25nx" event={"ID":"b0ca4fb8-e017-4010-ac17-07820394dc2b","Type":"ContainerDied","Data":"6e94d642f73c4d05026c8b014679ce6b76183a88d3b962a261ead26dffc38b1f"} Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.335815 4672 scope.go:117] "RemoveContainer" containerID="814eb47cf8bec802f5297c0630ada9a149479dd0462ad9c73edb5b7a394b770a" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.338968 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38ceb57e-4f73-4188-9325-bc1d0a8f24fa","Type":"ContainerDied","Data":"6bce14d1b45c2e29447d782e9b8f4f126d95d7a1bdbae8183030e804967fa765"} Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.339072 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.352532 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.409416835 podStartE2EDuration="20.352513365s" podCreationTimestamp="2025-10-07 15:08:56 +0000 UTC" firstStartedPulling="2025-10-07 15:08:57.323120312 +0000 UTC m=+1214.298298893" lastFinishedPulling="2025-10-07 15:09:15.266216842 +0000 UTC m=+1232.241395423" observedRunningTime="2025-10-07 15:09:16.350551148 +0000 UTC m=+1233.325729749" watchObservedRunningTime="2025-10-07 15:09:16.352513365 +0000 UTC m=+1233.327691946" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.362316 4672 scope.go:117] "RemoveContainer" containerID="c676ab2be5745d30263bb09eaf19a53e124459767b8c23b59f5352a1ae2b6439" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.368413 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.375685 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b7dc7f747-w25nx"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.390338 4672 scope.go:117] "RemoveContainer" containerID="0c793d7d513ccaff013d0b18fc87dc6d5727de33bd1d9b4e43d4d959b59b187a" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.394399 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.411606 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430191 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430589 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="proxy-httpd" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430604 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="proxy-httpd" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430617 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="init" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430624 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="init" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430640 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-notification-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430647 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-notification-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430663 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api-log" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430668 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api-log" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430676 4672 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="sg-core" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430681 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="sg-core" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430694 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-central-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430700 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-central-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430715 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="dnsmasq-dns" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430722 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="dnsmasq-dns" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430729 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" containerName="cinder-db-sync" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430735 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" containerName="cinder-db-sync" Oct 07 15:09:16 crc kubenswrapper[4672]: E1007 15:09:16.430745 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430751 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430913 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api-log" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430928 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="sg-core" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430940 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" containerName="dnsmasq-dns" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430949 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="proxy-httpd" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430961 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-notification-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430970 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" containerName="cinder-db-sync" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430980 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" containerName="ceilometer-central-agent" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.430990 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="797ac265-2c36-43ef-9e70-862fde9bacc5" containerName="barbican-api" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.432524 4672 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.434444 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.434805 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.443063 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.461576 4672 scope.go:117] "RemoveContainer" containerID="3ad7b49ed11249164dced50d245e5e6070105dabb2cde377ae8e2f4b30b17b8b" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.493324 4672 scope.go:117] "RemoveContainer" containerID="2dd39d87e163c09a4e30a8cf7c1ed02c500610c5eb379a1a0af591aef4da0ef6" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.512887 4672 scope.go:117] "RemoveContainer" containerID="e8e83ec7e6bcd16e57e8085610158006a2e5d2f370956ac858cf5eafd986478c" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.558730 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.558819 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.559074 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.559144 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.559215 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.559299 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.559380 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qr48\" (UniqueName: 
\"kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.602116 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.603927 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.609958 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.610275 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.610431 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.610569 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-cxgrr" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.615039 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.644881 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.646605 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.661990 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662102 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662135 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662172 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5dmz\" (UniqueName: \"kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662206 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 
07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662243 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662259 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662276 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662296 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662319 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662344 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662365 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.662389 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qr48\" (UniqueName: \"kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.663393 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.663613 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd\") pod \"ceilometer-0\" (UID: 
\"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.670892 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.672661 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.678619 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.686550 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.695957 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qr48\" (UniqueName: \"kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48\") pod \"ceilometer-0\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.713943 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"] Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.760997 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763320 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763371 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763401 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763422 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763442 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5m55\" (UniqueName: \"kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763472 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5dmz\" (UniqueName: \"kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763495 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763518 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763555 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 
15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763574 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763594 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763630 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.763717 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.768236 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.776380 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.778563 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.782405 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.807686 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5dmz\" (UniqueName: \"kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz\") pod \"cinder-scheduler-0\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.866970 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.867038 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5m55\" (UniqueName: \"kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.867085 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.867141 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.867177 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.867233 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.868059 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.868574 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.869353 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.869850 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb\") pod 
\"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.879580 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.910261 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5m55\" (UniqueName: \"kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55\") pod \"dnsmasq-dns-78cd4749fc-v9n9w\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:16 crc kubenswrapper[4672]: I1007 15:09:16.929781 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.044492 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.059078 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.062099 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.066009 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.077412 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187438 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jq9d\" (UniqueName: \"kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187485 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187524 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187573 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187605 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187742 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.187865 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289442 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jq9d\" (UniqueName: \"kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289488 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289531 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289584 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289625 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289662 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.289687 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.290147 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.290216 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.295982 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.301278 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.309317 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.324608 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.326913 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jq9d\" (UniqueName: \"kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d\") pod \"cinder-api-0\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.489458 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.492368 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.587346 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.679562 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"] Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.814245 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:17 crc kubenswrapper[4672]: W1007 15:09:17.831906 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e76fa3b_d36e_4700_b046_1ba853b1730a.slice/crio-802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc WatchSource:0}: Error finding container 802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc: Status 404 returned error can't find the container with id 802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.930195 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38ceb57e-4f73-4188-9325-bc1d0a8f24fa" path="/var/lib/kubelet/pods/38ceb57e-4f73-4188-9325-bc1d0a8f24fa/volumes" Oct 07 15:09:17 crc kubenswrapper[4672]: I1007 15:09:17.931340 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0ca4fb8-e017-4010-ac17-07820394dc2b" path="/var/lib/kubelet/pods/b0ca4fb8-e017-4010-ac17-07820394dc2b/volumes" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.370326 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.374728 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerStarted","Data":"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.374777 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerStarted","Data":"a6886e58ad833a89038c252ada71a393371b1fccb368722cd05ae1bc47e0b7f6"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.381807 4672 generic.go:334] "Generic (PLEG): container finished" podID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerID="3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd" exitCode=137 Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.381880 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerDied","Data":"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.381912 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bbdf8cc6b-btjlc" event={"ID":"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4","Type":"ContainerDied","Data":"50392358c71a51f4782ac87ca03d782339d6aa19aedb651da24f09e7e2604e72"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.381933 4672 scope.go:117] "RemoveContainer" containerID="f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.382096 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-bbdf8cc6b-btjlc" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.399655 4672 generic.go:334] "Generic (PLEG): container finished" podID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerID="108a4dc4754471257133a71c713a8c0637673e0012918dd776e0219715559e3b" exitCode=0 Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.399734 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" event={"ID":"8e76fa3b-d36e-4700-b046-1ba853b1730a","Type":"ContainerDied","Data":"108a4dc4754471257133a71c713a8c0637673e0012918dd776e0219715559e3b"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.399759 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" event={"ID":"8e76fa3b-d36e-4700-b046-1ba853b1730a","Type":"ContainerStarted","Data":"802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423082 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423130 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423235 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423264 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423293 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423310 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlgzs\" (UniqueName: \"kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.423340 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts\") pod \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\" (UID: \"0fdc29d3-c934-4da2-870a-9ee2fd82b5e4\") " Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.424643 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs" (OuterVolumeSpecName: "logs") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.441308 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerStarted","Data":"9b9d0a64ee32c472dfc492349c89081665d3894cdb3788a83171cef7279d967f"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.446185 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.477303 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs" (OuterVolumeSpecName: "kube-api-access-dlgzs") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "kube-api-access-dlgzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.478898 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerStarted","Data":"b930b065ff1ba5f43b2e4f88ada4d3520bf3d0966c0f79221153aeb52298d4a4"} Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.491776 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts" (OuterVolumeSpecName: "scripts") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.517444 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data" (OuterVolumeSpecName: "config-data") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.518947 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.525970 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.526007 4672 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.526050 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.526059 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.526066 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlgzs\" (UniqueName: \"kubernetes.io/projected/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-kube-api-access-dlgzs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.526075 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.541762 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" (UID: "0fdc29d3-c934-4da2-870a-9ee2fd82b5e4"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.621418 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-pkll5"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.628144 4672 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:18 crc kubenswrapper[4672]: E1007 15:09:18.631240 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.631271 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" Oct 07 15:09:18 crc kubenswrapper[4672]: E1007 15:09:18.631290 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon-log" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.631296 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon-log" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.631513 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.631536 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" containerName="horizon-log" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.632171 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pkll5"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.632262 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.708527 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-mc8ns"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.709634 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.730888 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvxvl\" (UniqueName: \"kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl\") pod \"nova-api-db-create-pkll5\" (UID: \"294a14e2-fc34-497e-b003-f82a21411703\") " pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.744136 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mc8ns"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.758032 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.766730 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-bbdf8cc6b-btjlc"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.816539 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-89g5s"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.818049 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.832510 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvxvl\" (UniqueName: \"kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl\") pod \"nova-api-db-create-pkll5\" (UID: \"294a14e2-fc34-497e-b003-f82a21411703\") " pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.832649 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7tv8\" (UniqueName: \"kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8\") pod \"nova-cell0-db-create-mc8ns\" (UID: \"274cd1ef-76a6-45f3-ad79-690178d34d7b\") " pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.835995 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-89g5s"] Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.859733 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvxvl\" (UniqueName: \"kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl\") pod \"nova-api-db-create-pkll5\" (UID: \"294a14e2-fc34-497e-b003-f82a21411703\") " pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.865463 4672 scope.go:117] "RemoveContainer" containerID="3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.923916 4672 scope.go:117] "RemoveContainer" containerID="f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc" Oct 07 15:09:18 crc kubenswrapper[4672]: E1007 15:09:18.924493 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc\": container with ID starting with f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc not found: ID does not exist" containerID="f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.924552 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc"} err="failed to get container status \"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc\": rpc error: code = NotFound desc = could not find container \"f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc\": container with ID starting with f4ed41d15c9324d2fa86ea81a8452cf8594a0aff60d786a96cae9ec0bbc1f5fc not found: ID does not exist" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.924592 4672 scope.go:117] "RemoveContainer" containerID="3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd" Oct 07 15:09:18 crc kubenswrapper[4672]: E1007 15:09:18.925743 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd\": container with ID starting with 3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd not found: ID does not exist" containerID="3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.925777 
4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd"} err="failed to get container status \"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd\": rpc error: code = NotFound desc = could not find container \"3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd\": container with ID starting with 3517cd109bfb1d55bdd7faf394bc938e36795c022754150fc7cdd2da9dffadbd not found: ID does not exist" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.941191 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz5mt\" (UniqueName: \"kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt\") pod \"nova-cell1-db-create-89g5s\" (UID: \"55b482fc-c341-4d8f-aec4-ab97e93573e8\") " pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.941281 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7tv8\" (UniqueName: \"kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8\") pod \"nova-cell0-db-create-mc8ns\" (UID: \"274cd1ef-76a6-45f3-ad79-690178d34d7b\") " pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.963622 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7tv8\" (UniqueName: \"kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8\") pod \"nova-cell0-db-create-mc8ns\" (UID: \"274cd1ef-76a6-45f3-ad79-690178d34d7b\") " pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:18 crc kubenswrapper[4672]: I1007 15:09:18.968472 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.041946 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.044506 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz5mt\" (UniqueName: \"kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt\") pod \"nova-cell1-db-create-89g5s\" (UID: \"55b482fc-c341-4d8f-aec4-ab97e93573e8\") " pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.074565 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz5mt\" (UniqueName: \"kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt\") pod \"nova-cell1-db-create-89g5s\" (UID: \"55b482fc-c341-4d8f-aec4-ab97e93573e8\") " pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.098538 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.148286 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.151141 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.161523 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-log" containerID="cri-o://a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d" gracePeriod=30 Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.162211 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-httpd" containerID="cri-o://ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649" gracePeriod=30 Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.550508 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerStarted","Data":"e9439c7c8c5df960de1d7f8b46766647496d0b182d4360275a52942c4ba15005"} Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.563728 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerStarted","Data":"971d88de6e33fe486b6dc5a3c9a5ed7707a70486701ca849f74ddec083de4c3a"} Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.573278 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerStarted","Data":"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b"} Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.605803 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" event={"ID":"8e76fa3b-d36e-4700-b046-1ba853b1730a","Type":"ContainerStarted","Data":"33b8d2cf3e62fb61f9dd03cdb1112c5db182a1e9d794889e3694cf9751f13d5e"} Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.605876 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.620681 4672 generic.go:334] "Generic (PLEG): container finished" podID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerID="a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d" exitCode=143 Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.620725 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerDied","Data":"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d"} Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.638093 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pkll5"] Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.645462 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" podStartSLOduration=3.645443173 podStartE2EDuration="3.645443173s" podCreationTimestamp="2025-10-07 15:09:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:19.630699444 +0000 UTC 
m=+1236.605878025" watchObservedRunningTime="2025-10-07 15:09:19.645443173 +0000 UTC m=+1236.620621754" Oct 07 15:09:19 crc kubenswrapper[4672]: W1007 15:09:19.866715 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod274cd1ef_76a6_45f3_ad79_690178d34d7b.slice/crio-380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a WatchSource:0}: Error finding container 380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a: Status 404 returned error can't find the container with id 380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.868500 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mc8ns"] Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.930624 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fdc29d3-c934-4da2-870a-9ee2fd82b5e4" path="/var/lib/kubelet/pods/0fdc29d3-c934-4da2-870a-9ee2fd82b5e4/volumes" Oct 07 15:09:19 crc kubenswrapper[4672]: I1007 15:09:19.965359 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-89g5s"] Oct 07 15:09:20 crc kubenswrapper[4672]: E1007 15:09:20.532410 4672 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod294a14e2_fc34_497e_b003_f82a21411703.slice/crio-04cad2e1ddba4e4df454bea7f0831d97dd98275d849a79038c71da6c197da5df.scope\": RecentStats: unable to find data in memory cache]" Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.633888 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api-log" containerID="cri-o://971d88de6e33fe486b6dc5a3c9a5ed7707a70486701ca849f74ddec083de4c3a" gracePeriod=30 Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.634484 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerStarted","Data":"2bb689ef487d921dbc3c597d1ca0f63e00723761e51a50a4eddb5ba466614ea2"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.634530 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.634814 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api" containerID="cri-o://2bb689ef487d921dbc3c597d1ca0f63e00723761e51a50a4eddb5ba466614ea2" gracePeriod=30 Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.636410 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-89g5s" event={"ID":"55b482fc-c341-4d8f-aec4-ab97e93573e8","Type":"ContainerStarted","Data":"586333b22a618707e73ab2c0507f425cf5197af4fd25cf725d2fe593e8427c69"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.641197 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mc8ns" event={"ID":"274cd1ef-76a6-45f3-ad79-690178d34d7b","Type":"ContainerStarted","Data":"865df58d2dda1bec8ab7581bccb70148613e8ede07d0b4ad84846053d02c4bec"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.641265 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-db-create-mc8ns" event={"ID":"274cd1ef-76a6-45f3-ad79-690178d34d7b","Type":"ContainerStarted","Data":"380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.646730 4672 generic.go:334] "Generic (PLEG): container finished" podID="294a14e2-fc34-497e-b003-f82a21411703" containerID="04cad2e1ddba4e4df454bea7f0831d97dd98275d849a79038c71da6c197da5df" exitCode=0 Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.646809 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pkll5" event={"ID":"294a14e2-fc34-497e-b003-f82a21411703","Type":"ContainerDied","Data":"04cad2e1ddba4e4df454bea7f0831d97dd98275d849a79038c71da6c197da5df"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.646832 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pkll5" event={"ID":"294a14e2-fc34-497e-b003-f82a21411703","Type":"ContainerStarted","Data":"e9754a5be76bebed65082e45d63d04736dd772441b5a8598508fc3ac026e0314"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.652851 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerStarted","Data":"d2ef298d5c4474114598239077141110864832749127e8d10c543e0a691743f1"} Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.656416 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.656395243 podStartE2EDuration="3.656395243s" podCreationTimestamp="2025-10-07 15:09:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:20.653611772 +0000 UTC m=+1237.628790353" watchObservedRunningTime="2025-10-07 15:09:20.656395243 +0000 UTC m=+1237.631573814" Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.681558 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.407007328 podStartE2EDuration="4.681537355s" podCreationTimestamp="2025-10-07 15:09:16 +0000 UTC" firstStartedPulling="2025-10-07 15:09:17.603202209 +0000 UTC m=+1234.578380790" lastFinishedPulling="2025-10-07 15:09:17.877732236 +0000 UTC m=+1234.852910817" observedRunningTime="2025-10-07 15:09:20.677748025 +0000 UTC m=+1237.652926616" watchObservedRunningTime="2025-10-07 15:09:20.681537355 +0000 UTC m=+1237.656715936" Oct 07 15:09:20 crc kubenswrapper[4672]: I1007 15:09:20.709887 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-mc8ns" podStartSLOduration=2.7098654189999998 podStartE2EDuration="2.709865419s" podCreationTimestamp="2025-10-07 15:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:20.708151629 +0000 UTC m=+1237.683330220" watchObservedRunningTime="2025-10-07 15:09:20.709865419 +0000 UTC m=+1237.685044010" Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.520914 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.525751 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-httpd" 
containerID="cri-o://c7f77eb1b8370e6a8572b2ab38b36b61b5604f5c74127e666a98d354b1cf10f8" gracePeriod=30 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.525724 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-log" containerID="cri-o://b44316811ed13bd8cd6d57d88f2234960eaa22263a32bcba2699e0fbebd5b53f" gracePeriod=30 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.661208 4672 generic.go:334] "Generic (PLEG): container finished" podID="274cd1ef-76a6-45f3-ad79-690178d34d7b" containerID="865df58d2dda1bec8ab7581bccb70148613e8ede07d0b4ad84846053d02c4bec" exitCode=0 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.661288 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mc8ns" event={"ID":"274cd1ef-76a6-45f3-ad79-690178d34d7b","Type":"ContainerDied","Data":"865df58d2dda1bec8ab7581bccb70148613e8ede07d0b4ad84846053d02c4bec"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.675471 4672 generic.go:334] "Generic (PLEG): container finished" podID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerID="2bb689ef487d921dbc3c597d1ca0f63e00723761e51a50a4eddb5ba466614ea2" exitCode=0 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.675500 4672 generic.go:334] "Generic (PLEG): container finished" podID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerID="971d88de6e33fe486b6dc5a3c9a5ed7707a70486701ca849f74ddec083de4c3a" exitCode=143 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.675551 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerDied","Data":"2bb689ef487d921dbc3c597d1ca0f63e00723761e51a50a4eddb5ba466614ea2"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.675617 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerDied","Data":"971d88de6e33fe486b6dc5a3c9a5ed7707a70486701ca849f74ddec083de4c3a"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.683492 4672 generic.go:334] "Generic (PLEG): container finished" podID="70296716-492c-452b-8d58-0591749c61f1" containerID="b44316811ed13bd8cd6d57d88f2234960eaa22263a32bcba2699e0fbebd5b53f" exitCode=143 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.683576 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerDied","Data":"b44316811ed13bd8cd6d57d88f2234960eaa22263a32bcba2699e0fbebd5b53f"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.692530 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerStarted","Data":"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.694430 4672 generic.go:334] "Generic (PLEG): container finished" podID="55b482fc-c341-4d8f-aec4-ab97e93573e8" containerID="566b8976a2d8ea8c854f6fd364fd9630aee1fc49d6ab2d9adfeba86d7df1a873" exitCode=0 Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.694940 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-89g5s" 
event={"ID":"55b482fc-c341-4d8f-aec4-ab97e93573e8","Type":"ContainerDied","Data":"566b8976a2d8ea8c854f6fd364fd9630aee1fc49d6ab2d9adfeba86d7df1a873"} Oct 07 15:09:21 crc kubenswrapper[4672]: I1007 15:09:21.930398 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.243098 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.250512 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.343958 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344028 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344085 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvxvl\" (UniqueName: \"kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl\") pod \"294a14e2-fc34-497e-b003-f82a21411703\" (UID: \"294a14e2-fc34-497e-b003-f82a21411703\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344150 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344699 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs" (OuterVolumeSpecName: "logs") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344172 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344905 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jq9d\" (UniqueName: \"kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.344984 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.345005 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle\") pod \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\" (UID: \"fe3abe51-0a62-4040-af4c-b5cb79a89edb\") " Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.345379 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3abe51-0a62-4040-af4c-b5cb79a89edb-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.346184 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.354536 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts" (OuterVolumeSpecName: "scripts") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.354582 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d" (OuterVolumeSpecName: "kube-api-access-7jq9d") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "kube-api-access-7jq9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.354606 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl" (OuterVolumeSpecName: "kube-api-access-kvxvl") pod "294a14e2-fc34-497e-b003-f82a21411703" (UID: "294a14e2-fc34-497e-b003-f82a21411703"). InnerVolumeSpecName "kube-api-access-kvxvl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.359231 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.398244 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.413774 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data" (OuterVolumeSpecName: "config-data") pod "fe3abe51-0a62-4040-af4c-b5cb79a89edb" (UID: "fe3abe51-0a62-4040-af4c-b5cb79a89edb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447687 4672 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3abe51-0a62-4040-af4c-b5cb79a89edb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447726 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447739 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447750 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447762 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvxvl\" (UniqueName: \"kubernetes.io/projected/294a14e2-fc34-497e-b003-f82a21411703-kube-api-access-kvxvl\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447775 4672 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3abe51-0a62-4040-af4c-b5cb79a89edb-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.447785 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jq9d\" (UniqueName: \"kubernetes.io/projected/fe3abe51-0a62-4040-af4c-b5cb79a89edb-kube-api-access-7jq9d\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.704101 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.704135 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3abe51-0a62-4040-af4c-b5cb79a89edb","Type":"ContainerDied","Data":"b930b065ff1ba5f43b2e4f88ada4d3520bf3d0966c0f79221153aeb52298d4a4"} Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.704194 4672 scope.go:117] "RemoveContainer" containerID="2bb689ef487d921dbc3c597d1ca0f63e00723761e51a50a4eddb5ba466614ea2" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.706311 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pkll5" event={"ID":"294a14e2-fc34-497e-b003-f82a21411703","Type":"ContainerDied","Data":"e9754a5be76bebed65082e45d63d04736dd772441b5a8598508fc3ac026e0314"} Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.706365 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9754a5be76bebed65082e45d63d04736dd772441b5a8598508fc3ac026e0314" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.706380 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pkll5" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.750230 4672 scope.go:117] "RemoveContainer" containerID="971d88de6e33fe486b6dc5a3c9a5ed7707a70486701ca849f74ddec083de4c3a" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.754075 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.775537 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.806639 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:22 crc kubenswrapper[4672]: E1007 15:09:22.807419 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807432 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api" Oct 07 15:09:22 crc kubenswrapper[4672]: E1007 15:09:22.807449 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api-log" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807457 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api-log" Oct 07 15:09:22 crc kubenswrapper[4672]: E1007 15:09:22.807478 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="294a14e2-fc34-497e-b003-f82a21411703" containerName="mariadb-database-create" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807483 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="294a14e2-fc34-497e-b003-f82a21411703" containerName="mariadb-database-create" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807818 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api-log" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807842 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="294a14e2-fc34-497e-b003-f82a21411703" containerName="mariadb-database-create" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807876 4672 
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.807876 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" containerName="cinder-api"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.809604 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.812894 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.813792 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.819604 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.824840 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.962798 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6lpg\" (UniqueName: \"kubernetes.io/projected/cac7576e-f243-46c2-90cb-e62d4c822d81-kube-api-access-m6lpg\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963181 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963207 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-scripts\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963225 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963260 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data-custom\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963293 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0"
Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963333 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\")
" pod="openstack/cinder-api-0" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963375 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cac7576e-f243-46c2-90cb-e62d4c822d81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:22 crc kubenswrapper[4672]: I1007 15:09:22.963400 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cac7576e-f243-46c2-90cb-e62d4c822d81-logs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065765 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065838 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cac7576e-f243-46c2-90cb-e62d4c822d81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065876 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cac7576e-f243-46c2-90cb-e62d4c822d81-logs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065933 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6lpg\" (UniqueName: \"kubernetes.io/projected/cac7576e-f243-46c2-90cb-e62d4c822d81-kube-api-access-m6lpg\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065954 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065972 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-scripts\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.065995 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cac7576e-f243-46c2-90cb-e62d4c822d81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") 
" pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.066041 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data-custom\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.066168 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.066421 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cac7576e-f243-46c2-90cb-e62d4c822d81-logs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.071228 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data-custom\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.072200 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.073424 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.073944 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.076034 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-config-data\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.088182 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cac7576e-f243-46c2-90cb-e62d4c822d81-scripts\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.090315 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6lpg\" (UniqueName: \"kubernetes.io/projected/cac7576e-f243-46c2-90cb-e62d4c822d81-kube-api-access-m6lpg\") pod \"cinder-api-0\" (UID: \"cac7576e-f243-46c2-90cb-e62d4c822d81\") " pod="openstack/cinder-api-0" Oct 07 15:09:23 crc 
kubenswrapper[4672]: I1007 15:09:23.227708 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.503966 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.510674 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.539309 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7tv8\" (UniqueName: \"kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8\") pod \"274cd1ef-76a6-45f3-ad79-690178d34d7b\" (UID: \"274cd1ef-76a6-45f3-ad79-690178d34d7b\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.539473 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz5mt\" (UniqueName: \"kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt\") pod \"55b482fc-c341-4d8f-aec4-ab97e93573e8\" (UID: \"55b482fc-c341-4d8f-aec4-ab97e93573e8\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.547977 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8" (OuterVolumeSpecName: "kube-api-access-x7tv8") pod "274cd1ef-76a6-45f3-ad79-690178d34d7b" (UID: "274cd1ef-76a6-45f3-ad79-690178d34d7b"). InnerVolumeSpecName "kube-api-access-x7tv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.552797 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt" (OuterVolumeSpecName: "kube-api-access-xz5mt") pod "55b482fc-c341-4d8f-aec4-ab97e93573e8" (UID: "55b482fc-c341-4d8f-aec4-ab97e93573e8"). InnerVolumeSpecName "kube-api-access-xz5mt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.626974 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.628334 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.642535 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7tv8\" (UniqueName: \"kubernetes.io/projected/274cd1ef-76a6-45f3-ad79-690178d34d7b-kube-api-access-x7tv8\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.642562 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz5mt\" (UniqueName: \"kubernetes.io/projected/55b482fc-c341-4d8f-aec4-ab97e93573e8-kube-api-access-xz5mt\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.715479 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-89g5s" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.716487 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-89g5s" event={"ID":"55b482fc-c341-4d8f-aec4-ab97e93573e8","Type":"ContainerDied","Data":"586333b22a618707e73ab2c0507f425cf5197af4fd25cf725d2fe593e8427c69"} Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.716541 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="586333b22a618707e73ab2c0507f425cf5197af4fd25cf725d2fe593e8427c69" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.718932 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mc8ns" event={"ID":"274cd1ef-76a6-45f3-ad79-690178d34d7b","Type":"ContainerDied","Data":"380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a"} Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.718959 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="380e5328408e47fa8a005b11e5ccc519bc2624b2dcae8a700cc8f3c950f17d5a" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.719032 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mc8ns" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.724797 4672 generic.go:334] "Generic (PLEG): container finished" podID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerID="ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649" exitCode=0 Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.724864 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.724868 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerDied","Data":"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649"} Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.725074 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed2a01b2-d44a-48b1-8463-84d5d873b9a7","Type":"ContainerDied","Data":"baad17181cc3ba92b3113292b3bcac1b0a8d9e04b9fa019c358ca3c7b7b999bb"} Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.725104 4672 scope.go:117] "RemoveContainer" containerID="ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739033 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerStarted","Data":"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a"} Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739185 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-central-agent" containerID="cri-o://009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357" gracePeriod=30 Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739470 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739514 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="proxy-httpd" containerID="cri-o://ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a" gracePeriod=30 Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739557 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="sg-core" containerID="cri-o://ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84" gracePeriod=30 Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.739973 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-notification-agent" containerID="cri-o://425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b" gracePeriod=30 Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745398 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745528 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745587 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745615 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdzh2\" (UniqueName: \"kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745698 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745737 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745787 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.745837 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data\") pod 
\"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\" (UID: \"ed2a01b2-d44a-48b1-8463-84d5d873b9a7\") " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.750494 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.750955 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs" (OuterVolumeSpecName: "logs") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.762200 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts" (OuterVolumeSpecName: "scripts") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.762424 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2" (OuterVolumeSpecName: "kube-api-access-cdzh2") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "kube-api-access-cdzh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.767238 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.781240 4672 scope.go:117] "RemoveContainer" containerID="a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.813968 4672 scope.go:117] "RemoveContainer" containerID="ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649" Oct 07 15:09:23 crc kubenswrapper[4672]: E1007 15:09:23.814528 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649\": container with ID starting with ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649 not found: ID does not exist" containerID="ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.814583 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649"} err="failed to get container status \"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649\": rpc error: code = NotFound desc = could not find container \"ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649\": container with ID starting with ad118d078e4d48178934f0aaad9d67e8cdce68d4918d738dd019990567749649 not found: ID does not exist" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.814615 4672 scope.go:117] "RemoveContainer" containerID="a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.815302 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: E1007 15:09:23.818800 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d\": container with ID starting with a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d not found: ID does not exist" containerID="a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.818851 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d"} err="failed to get container status \"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d\": rpc error: code = NotFound desc = could not find container \"a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d\": container with ID starting with a81bf5779458fba07363a2f0ccce339c75ec368029c3081595682c87f6ed314d not found: ID does not exist" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.843244 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.260282045 podStartE2EDuration="7.843219334s" podCreationTimestamp="2025-10-07 15:09:16 +0000 UTC" firstStartedPulling="2025-10-07 15:09:17.514921791 +0000 UTC m=+1234.490100362" lastFinishedPulling="2025-10-07 15:09:23.09785907 +0000 UTC m=+1240.073037651" observedRunningTime="2025-10-07 15:09:23.773319811 +0000 UTC m=+1240.748498402" watchObservedRunningTime="2025-10-07 15:09:23.843219334 +0000 UTC m=+1240.818397915" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.843823 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.848964 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.848993 4672 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.849038 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdzh2\" (UniqueName: \"kubernetes.io/projected/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-kube-api-access-cdzh2\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.849049 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.849057 4672 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.849081 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.849090 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.856503 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.868277 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data" (OuterVolumeSpecName: "config-data") pod "ed2a01b2-d44a-48b1-8463-84d5d873b9a7" (UID: "ed2a01b2-d44a-48b1-8463-84d5d873b9a7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.886656 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.903696 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3abe51-0a62-4040-af4c-b5cb79a89edb" path="/var/lib/kubelet/pods/fe3abe51-0a62-4040-af4c-b5cb79a89edb/volumes" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.951039 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:23 crc kubenswrapper[4672]: I1007 15:09:23.951083 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed2a01b2-d44a-48b1-8463-84d5d873b9a7-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.054828 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.078592 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.092956 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:24 crc kubenswrapper[4672]: E1007 15:09:24.093782 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b482fc-c341-4d8f-aec4-ab97e93573e8" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.093803 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b482fc-c341-4d8f-aec4-ab97e93573e8" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: E1007 15:09:24.093849 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-log" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.093857 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-log" Oct 07 15:09:24 crc kubenswrapper[4672]: E1007 15:09:24.093883 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="274cd1ef-76a6-45f3-ad79-690178d34d7b" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.093891 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="274cd1ef-76a6-45f3-ad79-690178d34d7b" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: E1007 15:09:24.093905 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-httpd" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.093914 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-httpd" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.094130 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="274cd1ef-76a6-45f3-ad79-690178d34d7b" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.094169 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-log" Oct 07 
15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.094184 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" containerName="glance-httpd" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.094194 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="55b482fc-c341-4d8f-aec4-ab97e93573e8" containerName="mariadb-database-create" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.095296 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.098988 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.099188 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.101861 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.257275 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-config-data\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.257653 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.257792 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.257934 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-logs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.258112 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgkjb\" (UniqueName: \"kubernetes.io/projected/e66032dd-633c-4fa7-b39c-714c4c799aed-kube-api-access-vgkjb\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.258258 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 
crc kubenswrapper[4672]: I1007 15:09:24.258380 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.258462 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-scripts\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360251 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360540 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-scripts\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360709 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-config-data\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360868 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360970 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.361112 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-logs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.361210 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgkjb\" (UniqueName: \"kubernetes.io/projected/e66032dd-633c-4fa7-b39c-714c4c799aed-kube-api-access-vgkjb\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.361314 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.360778 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.361821 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.362633 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e66032dd-633c-4fa7-b39c-714c4c799aed-logs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.366321 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-scripts\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.369152 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.371594 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-config-data\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.373481 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e66032dd-633c-4fa7-b39c-714c4c799aed-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.382766 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgkjb\" (UniqueName: \"kubernetes.io/projected/e66032dd-633c-4fa7-b39c-714c4c799aed-kube-api-access-vgkjb\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.394637 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e66032dd-633c-4fa7-b39c-714c4c799aed\") " pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.432332 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.798385 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cac7576e-f243-46c2-90cb-e62d4c822d81","Type":"ContainerStarted","Data":"e2b2b13bc7a55c1e130235c783442830254c3f1e4fc3f2a2c74fe3fb4ba296af"} Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.799087 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cac7576e-f243-46c2-90cb-e62d4c822d81","Type":"ContainerStarted","Data":"e579a535b50a68ec146eda1e899cd86e9806beaa9504ef1299a0cffe19efb0cd"} Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.806131 4672 generic.go:334] "Generic (PLEG): container finished" podID="70296716-492c-452b-8d58-0591749c61f1" containerID="c7f77eb1b8370e6a8572b2ab38b36b61b5604f5c74127e666a98d354b1cf10f8" exitCode=0 Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.806192 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerDied","Data":"c7f77eb1b8370e6a8572b2ab38b36b61b5604f5c74127e666a98d354b1cf10f8"} Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809729 4672 generic.go:334] "Generic (PLEG): container finished" podID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerID="ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a" exitCode=0 Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809807 4672 generic.go:334] "Generic (PLEG): container finished" podID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerID="ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84" exitCode=2 Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809816 4672 generic.go:334] "Generic (PLEG): container finished" podID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerID="425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b" exitCode=0 Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809873 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerDied","Data":"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a"} Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809900 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerDied","Data":"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84"} Oct 07 15:09:24 crc kubenswrapper[4672]: I1007 15:09:24.809909 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerDied","Data":"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b"} Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.060400 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.308036 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.489599 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.489950 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490009 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490060 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490095 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4zhj\" (UniqueName: \"kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490185 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490262 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490282 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts\") pod \"70296716-492c-452b-8d58-0591749c61f1\" (UID: \"70296716-492c-452b-8d58-0591749c61f1\") " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.490902 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.491415 4672 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.496597 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs" (OuterVolumeSpecName: "logs") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.497451 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj" (OuterVolumeSpecName: "kube-api-access-n4zhj") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "kube-api-access-n4zhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.511930 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.514108 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts" (OuterVolumeSpecName: "scripts") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.594348 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.594388 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4zhj\" (UniqueName: \"kubernetes.io/projected/70296716-492c-452b-8d58-0591749c61f1-kube-api-access-n4zhj\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.594402 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70296716-492c-452b-8d58-0591749c61f1-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.594411 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.627427 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.630227 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.649942 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.651199 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data" (OuterVolumeSpecName: "config-data") pod "70296716-492c-452b-8d58-0591749c61f1" (UID: "70296716-492c-452b-8d58-0591749c61f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.695942 4672 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.695992 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.696004 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.696039 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70296716-492c-452b-8d58-0591749c61f1-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.822711 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e66032dd-633c-4fa7-b39c-714c4c799aed","Type":"ContainerStarted","Data":"919ce899df6e9c1e56727b90be3ed33f412828fbbd792026c6df0c1978e4b17d"} Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.823027 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e66032dd-633c-4fa7-b39c-714c4c799aed","Type":"ContainerStarted","Data":"9995c0a9069df842d0116379f5062d75ba3102b6c7c5ad62166b8ce1e7c5b1cb"} Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.824964 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70296716-492c-452b-8d58-0591749c61f1","Type":"ContainerDied","Data":"65df83a2965111f14c864ad9a4e9a0975fc24e5f02678948c1319baf860d36cd"} Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.824978 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.825043 4672 scope.go:117] "RemoveContainer" containerID="c7f77eb1b8370e6a8572b2ab38b36b61b5604f5c74127e666a98d354b1cf10f8" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.829651 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cac7576e-f243-46c2-90cb-e62d4c822d81","Type":"ContainerStarted","Data":"a8dd935b0e2215e3c15be351bdf95138ce2da4aa0815b606ee42723589528cce"} Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.829907 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.851035 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.850999095 podStartE2EDuration="3.850999095s" podCreationTimestamp="2025-10-07 15:09:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:25.847420011 +0000 UTC m=+1242.822598602" watchObservedRunningTime="2025-10-07 15:09:25.850999095 +0000 UTC m=+1242.826177666" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.852518 4672 scope.go:117] "RemoveContainer" containerID="b44316811ed13bd8cd6d57d88f2234960eaa22263a32bcba2699e0fbebd5b53f" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.873988 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.925490 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed2a01b2-d44a-48b1-8463-84d5d873b9a7" path="/var/lib/kubelet/pods/ed2a01b2-d44a-48b1-8463-84d5d873b9a7/volumes" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.926201 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.926232 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:25 crc kubenswrapper[4672]: E1007 15:09:25.926516 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-log" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.926528 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-log" Oct 07 15:09:25 crc kubenswrapper[4672]: E1007 15:09:25.926542 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-httpd" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.926549 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-httpd" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.926981 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-httpd" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.927001 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="70296716-492c-452b-8d58-0591749c61f1" containerName="glance-log" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.927907 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.937496 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.937633 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 15:09:25 crc kubenswrapper[4672]: I1007 15:09:25.941476 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106342 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106400 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106563 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-logs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106609 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sgvl\" (UniqueName: \"kubernetes.io/projected/180e3a93-5be7-42c2-832c-86e29fb5444d-kube-api-access-7sgvl\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106697 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.106812 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.107042 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.107071 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208436 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208491 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208560 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208580 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208617 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208642 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208673 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-logs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.208689 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sgvl\" (UniqueName: \"kubernetes.io/projected/180e3a93-5be7-42c2-832c-86e29fb5444d-kube-api-access-7sgvl\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.209199 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.209419 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.209508 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/180e3a93-5be7-42c2-832c-86e29fb5444d-logs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.213503 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.220149 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.220842 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.223826 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180e3a93-5be7-42c2-832c-86e29fb5444d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.226809 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sgvl\" (UniqueName: \"kubernetes.io/projected/180e3a93-5be7-42c2-832c-86e29fb5444d-kube-api-access-7sgvl\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.237499 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"180e3a93-5be7-42c2-832c-86e29fb5444d\") " pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.252985 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.721761 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.823780 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.823902 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.823953 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.824034 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.824130 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.824167 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.824242 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qr48\" (UniqueName: \"kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48\") pod \"88c00ca7-b902-498c-8c6a-3e44c0b54419\" (UID: \"88c00ca7-b902-498c-8c6a-3e44c0b54419\") " Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.827255 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.827286 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.836115 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48" (OuterVolumeSpecName: "kube-api-access-8qr48") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "kube-api-access-8qr48". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.841761 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts" (OuterVolumeSpecName: "scripts") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.878352 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e66032dd-633c-4fa7-b39c-714c4c799aed","Type":"ContainerStarted","Data":"392f4de0f84b19d0cf325a131b63b5b991d0338a7d06bbe3fdbb1e7a4b9c3f75"} Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.886167 4672 generic.go:334] "Generic (PLEG): container finished" podID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerID="009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357" exitCode=0 Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.886232 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.886295 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerDied","Data":"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357"} Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.886327 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88c00ca7-b902-498c-8c6a-3e44c0b54419","Type":"ContainerDied","Data":"a6886e58ad833a89038c252ada71a393371b1fccb368722cd05ae1bc47e0b7f6"} Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.886348 4672 scope.go:117] "RemoveContainer" containerID="ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.919334 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.926647 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.926627987 podStartE2EDuration="2.926627987s" podCreationTimestamp="2025-10-07 15:09:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:26.91020298 +0000 UTC m=+1243.885381571" watchObservedRunningTime="2025-10-07 15:09:26.926627987 +0000 UTC m=+1243.901806568" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.927454 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.927680 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.927760 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.927823 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88c00ca7-b902-498c-8c6a-3e44c0b54419-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.927905 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qr48\" (UniqueName: \"kubernetes.io/projected/88c00ca7-b902-498c-8c6a-3e44c0b54419-kube-api-access-8qr48\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.928088 4672 scope.go:117] "RemoveContainer" containerID="ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.968484 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.973953 4672 scope.go:117] "RemoveContainer" containerID="425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b" Oct 07 15:09:26 crc kubenswrapper[4672]: I1007 15:09:26.982138 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:26 crc kubenswrapper[4672]: W1007 15:09:26.986239 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod180e3a93_5be7_42c2_832c_86e29fb5444d.slice/crio-94b2dd56f1f27cfe530eca129b959e074233b8ff0d7e30f0e61de96912eec4cd WatchSource:0}: Error finding container 94b2dd56f1f27cfe530eca129b959e074233b8ff0d7e30f0e61de96912eec4cd: Status 404 returned error can't find the container with id 94b2dd56f1f27cfe530eca129b959e074233b8ff0d7e30f0e61de96912eec4cd Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.010037 4672 scope.go:117] "RemoveContainer" containerID="009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.031204 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data" (OuterVolumeSpecName: "config-data") pod "88c00ca7-b902-498c-8c6a-3e44c0b54419" (UID: "88c00ca7-b902-498c-8c6a-3e44c0b54419"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.031857 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.031891 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88c00ca7-b902-498c-8c6a-3e44c0b54419-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.044558 4672 scope.go:117] "RemoveContainer" containerID="ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.045237 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a\": container with ID starting with ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a not found: ID does not exist" containerID="ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.045290 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a"} err="failed to get container status \"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a\": rpc error: code = NotFound desc = could not find container \"ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a\": container with ID starting with ec14d8b5a605b14e6579c88bc64c91849cd7c0ccc7590005696f68b69029008a not found: ID does not exist" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.045321 4672 scope.go:117] "RemoveContainer" containerID="ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.045791 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84\": container with ID starting with ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84 not found: ID does not exist" 
containerID="ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.045808 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84"} err="failed to get container status \"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84\": rpc error: code = NotFound desc = could not find container \"ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84\": container with ID starting with ecd6c697dd905447c4ced51796bde3c84082b98a0b01a89a8f80000f6d717c84 not found: ID does not exist" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.045820 4672 scope.go:117] "RemoveContainer" containerID="425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.046189 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b\": container with ID starting with 425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b not found: ID does not exist" containerID="425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.046205 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b"} err="failed to get container status \"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b\": rpc error: code = NotFound desc = could not find container \"425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b\": container with ID starting with 425955a6d78e6b3217a66153ea29090eca0bb00a0bd5f8c4828bfd3c0605442b not found: ID does not exist" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.046217 4672 scope.go:117] "RemoveContainer" containerID="009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.046561 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357\": container with ID starting with 009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357 not found: ID does not exist" containerID="009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.046591 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357"} err="failed to get container status \"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357\": rpc error: code = NotFound desc = could not find container \"009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357\": container with ID starting with 009b9e4c02d9ab7410ae9f00164bbc66bbe76276461ae7a7ed24c0e94734d357 not found: ID does not exist" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.079247 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.148761 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.156369 
4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="dnsmasq-dns" containerID="cri-o://37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248" gracePeriod=10 Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.323616 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.369289 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.405488 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.418150 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.422520 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.422980 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-central-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.422999 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-central-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.423014 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-notification-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423034 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-notification-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.423052 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="sg-core" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423060 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="sg-core" Oct 07 15:09:27 crc kubenswrapper[4672]: E1007 15:09:27.423114 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="proxy-httpd" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423125 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="proxy-httpd" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423873 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-notification-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423899 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="ceilometer-central-agent" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423919 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="proxy-httpd" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.423938 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" containerName="sg-core" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 
15:09:27.426410 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.430872 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.431132 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.432528 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542348 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542393 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542420 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542439 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542668 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2gc9\" (UniqueName: \"kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542753 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.542886 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644257 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" 
Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644557 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644581 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644602 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644643 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2gc9\" (UniqueName: \"kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.644679 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.645411 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.645465 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.646026 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.650940 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.652726 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 
15:09:27.652894 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.655550 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.664371 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2gc9\" (UniqueName: \"kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9\") pod \"ceilometer-0\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.756004 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.771827 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.848674 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.848793 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.848861 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.848908 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.848989 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk6sg\" (UniqueName: \"kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.849082 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb\") pod \"f80774c7-30a9-4805-a540-489ff4f886ba\" (UID: \"f80774c7-30a9-4805-a540-489ff4f886ba\") " Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.853932 4672 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg" (OuterVolumeSpecName: "kube-api-access-kk6sg") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "kube-api-access-kk6sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.927399 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.927406 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.930961 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.931998 4672 generic.go:334] "Generic (PLEG): container finished" podID="f80774c7-30a9-4805-a540-489ff4f886ba" containerID="37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248" exitCode=0 Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.932163 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.933914 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70296716-492c-452b-8d58-0591749c61f1" path="/var/lib/kubelet/pods/70296716-492c-452b-8d58-0591749c61f1/volumes" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.935545 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88c00ca7-b902-498c-8c6a-3e44c0b54419" path="/var/lib/kubelet/pods/88c00ca7-b902-498c-8c6a-3e44c0b54419/volumes" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.938461 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="cinder-scheduler" containerID="cri-o://e9439c7c8c5df960de1d7f8b46766647496d0b182d4360275a52942c4ba15005" gracePeriod=30 Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.938581 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="probe" containerID="cri-o://d2ef298d5c4474114598239077141110864832749127e8d10c543e0a691743f1" gracePeriod=30 Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.940959 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.946042 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" event={"ID":"f80774c7-30a9-4805-a540-489ff4f886ba","Type":"ContainerDied","Data":"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248"} Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.946210 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7bc7f99-xbd4f" event={"ID":"f80774c7-30a9-4805-a540-489ff4f886ba","Type":"ContainerDied","Data":"63c3504c6819224b196ee3bb830e6e18a1808945b03a93cfb85f82be52b37ace"} Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.946226 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"180e3a93-5be7-42c2-832c-86e29fb5444d","Type":"ContainerStarted","Data":"380ee7fef2953e91371850806599fa7bdd9b6ed53b9335e95e0702aa74f196c1"} Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.946241 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"180e3a93-5be7-42c2-832c-86e29fb5444d","Type":"ContainerStarted","Data":"94b2dd56f1f27cfe530eca129b959e074233b8ff0d7e30f0e61de96912eec4cd"} Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.946261 4672 scope.go:117] "RemoveContainer" containerID="37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.951975 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.952011 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" 
(UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.952030 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.952042 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk6sg\" (UniqueName: \"kubernetes.io/projected/f80774c7-30a9-4805-a540-489ff4f886ba-kube-api-access-kk6sg\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.952058 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.961903 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config" (OuterVolumeSpecName: "config") pod "f80774c7-30a9-4805-a540-489ff4f886ba" (UID: "f80774c7-30a9-4805-a540-489ff4f886ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:09:27 crc kubenswrapper[4672]: I1007 15:09:27.992380 4672 scope.go:117] "RemoveContainer" containerID="7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.044283 4672 scope.go:117] "RemoveContainer" containerID="37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248" Oct 07 15:09:28 crc kubenswrapper[4672]: E1007 15:09:28.044820 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248\": container with ID starting with 37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248 not found: ID does not exist" containerID="37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.044850 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248"} err="failed to get container status \"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248\": rpc error: code = NotFound desc = could not find container \"37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248\": container with ID starting with 37559a471d3b5d15ef0fd4a118dca2c1b48ebab79084608592d2851769325248 not found: ID does not exist" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.044876 4672 scope.go:117] "RemoveContainer" containerID="7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c" Oct 07 15:09:28 crc kubenswrapper[4672]: E1007 15:09:28.045230 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c\": container with ID starting with 7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c not found: ID does not exist" containerID="7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.045253 4672 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c"} err="failed to get container status \"7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c\": rpc error: code = NotFound desc = could not find container \"7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c\": container with ID starting with 7282dcb25cb80f19dbff49e1abc16ce54fb94af504311d3f5a7f7eff8955c54c not found: ID does not exist" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.055477 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f80774c7-30a9-4805-a540-489ff4f886ba-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.262059 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.278524 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:28 crc kubenswrapper[4672]: W1007 15:09:28.285100 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bd7d3a3_a711_47e1_b105_9865629b64a2.slice/crio-6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e WatchSource:0}: Error finding container 6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e: Status 404 returned error can't find the container with id 6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.320704 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cf7bc7f99-xbd4f"] Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.757963 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c54c-account-create-q95c7"] Oct 07 15:09:28 crc kubenswrapper[4672]: E1007 15:09:28.758318 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="dnsmasq-dns" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.758334 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="dnsmasq-dns" Oct 07 15:09:28 crc kubenswrapper[4672]: E1007 15:09:28.758348 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="init" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.758354 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="init" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.758527 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" containerName="dnsmasq-dns" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.759113 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.760958 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.770745 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c54c-account-create-q95c7"] Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.870824 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jhwb\" (UniqueName: \"kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb\") pod \"nova-api-c54c-account-create-q95c7\" (UID: \"27bd3313-2b8f-4ae9-acc8-f88633249dd9\") " pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.965260 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"180e3a93-5be7-42c2-832c-86e29fb5444d","Type":"ContainerStarted","Data":"c43210729372d1e0d366b7f226dc4a302a1126683cf81526dac53e923b753155"} Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.971806 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerStarted","Data":"eba5ca58caf26d0b6293aa0888dd7ca75f6f8af626653ac40617f92a9fca5862"} Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.971851 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerStarted","Data":"9771fb9a1645c29b223683ad803761922b56903de431739534be10627b8efb41"} Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.971861 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerStarted","Data":"6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e"} Oct 07 15:09:28 crc kubenswrapper[4672]: I1007 15:09:28.972430 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jhwb\" (UniqueName: \"kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb\") pod \"nova-api-c54c-account-create-q95c7\" (UID: \"27bd3313-2b8f-4ae9-acc8-f88633249dd9\") " pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.000957 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.000930353 podStartE2EDuration="4.000930353s" podCreationTimestamp="2025-10-07 15:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:28.991579201 +0000 UTC m=+1245.966757782" watchObservedRunningTime="2025-10-07 15:09:29.000930353 +0000 UTC m=+1245.976108934" Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.014927 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jhwb\" (UniqueName: \"kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb\") pod \"nova-api-c54c-account-create-q95c7\" (UID: \"27bd3313-2b8f-4ae9-acc8-f88633249dd9\") " pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.091619 4672 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.595680 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c54c-account-create-q95c7"] Oct 07 15:09:29 crc kubenswrapper[4672]: W1007 15:09:29.605863 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27bd3313_2b8f_4ae9_acc8_f88633249dd9.slice/crio-ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150 WatchSource:0}: Error finding container ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150: Status 404 returned error can't find the container with id ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150 Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.905874 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f80774c7-30a9-4805-a540-489ff4f886ba" path="/var/lib/kubelet/pods/f80774c7-30a9-4805-a540-489ff4f886ba/volumes" Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.985062 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerStarted","Data":"6ec4b4e568f4fb0d2c37640c1aeb7fb978a936312f2e72c53577af7cf8bb39a6"} Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.987200 4672 generic.go:334] "Generic (PLEG): container finished" podID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerID="d2ef298d5c4474114598239077141110864832749127e8d10c543e0a691743f1" exitCode=0 Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.987252 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerDied","Data":"d2ef298d5c4474114598239077141110864832749127e8d10c543e0a691743f1"} Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.990273 4672 generic.go:334] "Generic (PLEG): container finished" podID="27bd3313-2b8f-4ae9-acc8-f88633249dd9" containerID="9c1b7312a361c94fb586737ef34e50131f7efa6f2d21992ce511ca29c9aa14f9" exitCode=0 Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.991149 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c54c-account-create-q95c7" event={"ID":"27bd3313-2b8f-4ae9-acc8-f88633249dd9","Type":"ContainerDied","Data":"9c1b7312a361c94fb586737ef34e50131f7efa6f2d21992ce511ca29c9aa14f9"} Oct 07 15:09:29 crc kubenswrapper[4672]: I1007 15:09:29.991179 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c54c-account-create-q95c7" event={"ID":"27bd3313-2b8f-4ae9-acc8-f88633249dd9","Type":"ContainerStarted","Data":"ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150"} Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.006236 4672 generic.go:334] "Generic (PLEG): container finished" podID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerID="e9439c7c8c5df960de1d7f8b46766647496d0b182d4360275a52942c4ba15005" exitCode=0 Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.006339 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerDied","Data":"e9439c7c8c5df960de1d7f8b46766647496d0b182d4360275a52942c4ba15005"} Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.285904 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418115 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418229 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418264 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418315 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5dmz\" (UniqueName: \"kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418366 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418432 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts\") pod \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\" (UID: \"2aa440b2-4922-4a96-bc12-38f2ae3e374a\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.418738 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.419233 4672 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2aa440b2-4922-4a96-bc12-38f2ae3e374a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.425161 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts" (OuterVolumeSpecName: "scripts") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.425209 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.425728 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz" (OuterVolumeSpecName: "kube-api-access-l5dmz") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "kube-api-access-l5dmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.482481 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.502342 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.520770 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5dmz\" (UniqueName: \"kubernetes.io/projected/2aa440b2-4922-4a96-bc12-38f2ae3e374a-kube-api-access-l5dmz\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.520804 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.520814 4672 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.520823 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.539120 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data" (OuterVolumeSpecName: "config-data") pod "2aa440b2-4922-4a96-bc12-38f2ae3e374a" (UID: "2aa440b2-4922-4a96-bc12-38f2ae3e374a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.610733 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.622635 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aa440b2-4922-4a96-bc12-38f2ae3e374a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.723934 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jhwb\" (UniqueName: \"kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb\") pod \"27bd3313-2b8f-4ae9-acc8-f88633249dd9\" (UID: \"27bd3313-2b8f-4ae9-acc8-f88633249dd9\") " Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.729292 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb" (OuterVolumeSpecName: "kube-api-access-6jhwb") pod "27bd3313-2b8f-4ae9-acc8-f88633249dd9" (UID: "27bd3313-2b8f-4ae9-acc8-f88633249dd9"). InnerVolumeSpecName "kube-api-access-6jhwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:31 crc kubenswrapper[4672]: I1007 15:09:31.826688 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jhwb\" (UniqueName: \"kubernetes.io/projected/27bd3313-2b8f-4ae9-acc8-f88633249dd9-kube-api-access-6jhwb\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.018341 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2aa440b2-4922-4a96-bc12-38f2ae3e374a","Type":"ContainerDied","Data":"9b9d0a64ee32c472dfc492349c89081665d3894cdb3788a83171cef7279d967f"} Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.018396 4672 scope.go:117] "RemoveContainer" containerID="d2ef298d5c4474114598239077141110864832749127e8d10c543e0a691743f1" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.018731 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.020993 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c54c-account-create-q95c7" event={"ID":"27bd3313-2b8f-4ae9-acc8-f88633249dd9","Type":"ContainerDied","Data":"ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150"} Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.021027 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce0deedbd3294062f2e4fe4bc419b4c1e6f0b030a163f877888b3a741adba150" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.021110 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c54c-account-create-q95c7" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.046757 4672 scope.go:117] "RemoveContainer" containerID="e9439c7c8c5df960de1d7f8b46766647496d0b182d4360275a52942c4ba15005" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.060519 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.074372 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.088183 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:32 crc kubenswrapper[4672]: E1007 15:09:32.089010 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bd3313-2b8f-4ae9-acc8-f88633249dd9" containerName="mariadb-account-create" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089255 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bd3313-2b8f-4ae9-acc8-f88633249dd9" containerName="mariadb-account-create" Oct 07 15:09:32 crc kubenswrapper[4672]: E1007 15:09:32.089286 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="cinder-scheduler" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089292 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="cinder-scheduler" Oct 07 15:09:32 crc kubenswrapper[4672]: E1007 15:09:32.089489 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="probe" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089502 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="probe" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089951 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="cinder-scheduler" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089970 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" containerName="probe" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.089992 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="27bd3313-2b8f-4ae9-acc8-f88633249dd9" containerName="mariadb-account-create" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.092161 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.097652 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.105221 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.232840 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.233000 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.233081 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.233150 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-scripts\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.233212 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/133e6c33-5248-4463-9a3b-75431b468373-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.233243 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhrzv\" (UniqueName: \"kubernetes.io/projected/133e6c33-5248-4463-9a3b-75431b468373-kube-api-access-dhrzv\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.334607 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.334708 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.334760 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.334809 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-scripts\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.334858 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/133e6c33-5248-4463-9a3b-75431b468373-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.335546 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhrzv\" (UniqueName: \"kubernetes.io/projected/133e6c33-5248-4463-9a3b-75431b468373-kube-api-access-dhrzv\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.335134 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/133e6c33-5248-4463-9a3b-75431b468373-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.341154 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-scripts\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.341217 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.341996 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-config-data\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.342587 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e6c33-5248-4463-9a3b-75431b468373-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.371807 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhrzv\" (UniqueName: \"kubernetes.io/projected/133e6c33-5248-4463-9a3b-75431b468373-kube-api-access-dhrzv\") pod \"cinder-scheduler-0\" (UID: \"133e6c33-5248-4463-9a3b-75431b468373\") " pod="openstack/cinder-scheduler-0" Oct 07 
15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.417067 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 15:09:32 crc kubenswrapper[4672]: I1007 15:09:32.910063 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 15:09:33 crc kubenswrapper[4672]: I1007 15:09:33.032244 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"133e6c33-5248-4463-9a3b-75431b468373","Type":"ContainerStarted","Data":"141d48633005649aebe5f6f254305c8c8e80dc8efba34bdd72e897ea3b16296b"} Oct 07 15:09:33 crc kubenswrapper[4672]: I1007 15:09:33.038919 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerStarted","Data":"5407b9a6b501a2e247816e613344936f8e45cd5c30624c082e3af5902bf756b8"} Oct 07 15:09:33 crc kubenswrapper[4672]: I1007 15:09:33.039051 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:09:33 crc kubenswrapper[4672]: I1007 15:09:33.059053 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.512035142 podStartE2EDuration="6.059025751s" podCreationTimestamp="2025-10-07 15:09:27 +0000 UTC" firstStartedPulling="2025-10-07 15:09:28.288323581 +0000 UTC m=+1245.263502162" lastFinishedPulling="2025-10-07 15:09:31.83531419 +0000 UTC m=+1248.810492771" observedRunningTime="2025-10-07 15:09:33.058843476 +0000 UTC m=+1250.034022057" watchObservedRunningTime="2025-10-07 15:09:33.059025751 +0000 UTC m=+1250.034204332" Oct 07 15:09:33 crc kubenswrapper[4672]: I1007 15:09:33.907536 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aa440b2-4922-4a96-bc12-38f2ae3e374a" path="/var/lib/kubelet/pods/2aa440b2-4922-4a96-bc12-38f2ae3e374a/volumes" Oct 07 15:09:34 crc kubenswrapper[4672]: I1007 15:09:34.068246 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"133e6c33-5248-4463-9a3b-75431b468373","Type":"ContainerStarted","Data":"9b424e451a096807eda1d6058dff6f27df5b0e932e9dc962fa0c27d9e36e21b3"} Oct 07 15:09:34 crc kubenswrapper[4672]: I1007 15:09:34.432453 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 15:09:34 crc kubenswrapper[4672]: I1007 15:09:34.432879 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 15:09:34 crc kubenswrapper[4672]: I1007 15:09:34.484997 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 15:09:34 crc kubenswrapper[4672]: I1007 15:09:34.486211 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.081686 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"133e6c33-5248-4463-9a3b-75431b468373","Type":"ContainerStarted","Data":"4ff9a8597a3b02f28e00895414d4144dab66bdede6e836122d26f60114b489c6"} Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.082122 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.083753 4672 kubelet.go:2542] 
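[The ceilometer-0 "Observed pod startup duration" record above is internally consistent: podStartSLOduration is the end-to-end duration minus the time spent pulling images, and the m=+... monotonic offsets let you check it. A quick verification using the values copied from that record:

    package main

    import "fmt"

    func main() {
        e2e := 6.059025751                    // podStartE2EDuration, seconds
        firstStartedPulling := 1245.263502162 // m=+ offset, seconds
        lastFinishedPulling := 1248.810492771 // m=+ offset, seconds
        slo := e2e - (lastFinishedPulling - firstStartedPulling)
        fmt.Printf("%.9f\n", slo) // 2.512035142, matching podStartSLOduration
    }
]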
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.112911 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.112847351 podStartE2EDuration="3.112847351s" podCreationTimestamp="2025-10-07 15:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:35.108045211 +0000 UTC m=+1252.083223812" watchObservedRunningTime="2025-10-07 15:09:35.112847351 +0000 UTC m=+1252.088025942" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.253234 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.667430 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6c7bf59845-qwdkz" Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.728226 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.728592 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-544fc9bf6b-wfkjb" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-api" containerID="cri-o://5a702e8a30c015fddceb118b2c7dba9af0ed3ff6ca48e53f2afd284434aa4316" gracePeriod=30 Oct 07 15:09:35 crc kubenswrapper[4672]: I1007 15:09:35.728756 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-544fc9bf6b-wfkjb" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-httpd" containerID="cri-o://1894479f002df42e43f5b0e8adee0de7c1757ea5446835ee3137a6c3a96f62e7" gracePeriod=30 Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.095039 4672 generic.go:334] "Generic (PLEG): container finished" podID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerID="1894479f002df42e43f5b0e8adee0de7c1757ea5446835ee3137a6c3a96f62e7" exitCode=0 Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.095916 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerDied","Data":"1894479f002df42e43f5b0e8adee0de7c1757ea5446835ee3137a6c3a96f62e7"} Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.254083 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.254148 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.289645 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:36 crc kubenswrapper[4672]: I1007 15:09:36.300562 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.102657 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.102722 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.103033 4672 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.103084 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.341012 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.417276 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 15:09:37 crc kubenswrapper[4672]: I1007 15:09:37.506744 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 15:09:38 crc kubenswrapper[4672]: I1007 15:09:38.887871 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cf8d-account-create-lfxbd"] Oct 07 15:09:38 crc kubenswrapper[4672]: I1007 15:09:38.889651 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:38 crc kubenswrapper[4672]: I1007 15:09:38.892286 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 07 15:09:38 crc kubenswrapper[4672]: I1007 15:09:38.905160 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cf8d-account-create-lfxbd"] Oct 07 15:09:38 crc kubenswrapper[4672]: I1007 15:09:38.977872 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl6gq\" (UniqueName: \"kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq\") pod \"nova-cell0-cf8d-account-create-lfxbd\" (UID: \"1e1ba3cb-b673-4203-8790-2561949aa72f\") " pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.080114 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl6gq\" (UniqueName: \"kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq\") pod \"nova-cell0-cf8d-account-create-lfxbd\" (UID: \"1e1ba3cb-b673-4203-8790-2561949aa72f\") " pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.086994 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-b437-account-create-pvlh9"] Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.088579 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.091448 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.106310 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-b437-account-create-pvlh9"] Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.111885 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl6gq\" (UniqueName: \"kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq\") pod \"nova-cell0-cf8d-account-create-lfxbd\" (UID: \"1e1ba3cb-b673-4203-8790-2561949aa72f\") " pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.181485 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnr5l\" (UniqueName: \"kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l\") pod \"nova-cell1-b437-account-create-pvlh9\" (UID: \"bc280071-0df1-46dd-8d72-c48efe297e6c\") " pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.207062 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.283573 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnr5l\" (UniqueName: \"kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l\") pod \"nova-cell1-b437-account-create-pvlh9\" (UID: \"bc280071-0df1-46dd-8d72-c48efe297e6c\") " pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.305716 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnr5l\" (UniqueName: \"kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l\") pod \"nova-cell1-b437-account-create-pvlh9\" (UID: \"bc280071-0df1-46dd-8d72-c48efe297e6c\") " pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.474495 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.522694 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.522772 4672 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.771700 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 15:09:39 crc kubenswrapper[4672]: I1007 15:09:39.857455 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cf8d-account-create-lfxbd"] Oct 07 15:09:39 crc kubenswrapper[4672]: W1007 15:09:39.861746 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e1ba3cb_b673_4203_8790_2561949aa72f.slice/crio-abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d WatchSource:0}: Error finding container abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d: Status 404 returned error can't find the container with id abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.105362 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-b437-account-create-pvlh9"] Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.141807 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.146152 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-central-agent" containerID="cri-o://9771fb9a1645c29b223683ad803761922b56903de431739534be10627b8efb41" gracePeriod=30 Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.146584 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="proxy-httpd" containerID="cri-o://5407b9a6b501a2e247816e613344936f8e45cd5c30624c082e3af5902bf756b8" gracePeriod=30 Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.146631 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="sg-core" containerID="cri-o://6ec4b4e568f4fb0d2c37640c1aeb7fb978a936312f2e72c53577af7cf8bb39a6" gracePeriod=30 Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.146671 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-notification-agent" containerID="cri-o://eba5ca58caf26d0b6293aa0888dd7ca75f6f8af626653ac40617f92a9fca5862" gracePeriod=30 Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.169404 4672 generic.go:334] "Generic (PLEG): container finished" podID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerID="5a702e8a30c015fddceb118b2c7dba9af0ed3ff6ca48e53f2afd284434aa4316" exitCode=0 Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.169728 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" 
event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerDied","Data":"5a702e8a30c015fddceb118b2c7dba9af0ed3ff6ca48e53f2afd284434aa4316"} Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.190242 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-b437-account-create-pvlh9" event={"ID":"bc280071-0df1-46dd-8d72-c48efe297e6c","Type":"ContainerStarted","Data":"219d30230a25290fce239b796846059ea4b2f80388ecd8b43249f9742560f7c9"} Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.197257 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" event={"ID":"1e1ba3cb-b673-4203-8790-2561949aa72f","Type":"ContainerStarted","Data":"f39292180daf5766d19b71f5347c6d0fd06fbbc9e90f22bd63326b920d279326"} Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.197302 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" event={"ID":"1e1ba3cb-b673-4203-8790-2561949aa72f","Type":"ContainerStarted","Data":"abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d"} Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.222137 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" podStartSLOduration=2.222118169 podStartE2EDuration="2.222118169s" podCreationTimestamp="2025-10-07 15:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:09:40.221947074 +0000 UTC m=+1257.197125655" watchObservedRunningTime="2025-10-07 15:09:40.222118169 +0000 UTC m=+1257.197296750" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.565893 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.631666 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config\") pod \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.631765 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle\") pod \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.631846 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgqk8\" (UniqueName: \"kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8\") pod \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.631946 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config\") pod \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.632001 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs\") pod \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\" (UID: \"b9491fc8-cf87-4bfe-8907-f2362983fcc1\") " Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.644307 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b9491fc8-cf87-4bfe-8907-f2362983fcc1" (UID: "b9491fc8-cf87-4bfe-8907-f2362983fcc1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.645914 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8" (OuterVolumeSpecName: "kube-api-access-tgqk8") pod "b9491fc8-cf87-4bfe-8907-f2362983fcc1" (UID: "b9491fc8-cf87-4bfe-8907-f2362983fcc1"). InnerVolumeSpecName "kube-api-access-tgqk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.697228 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config" (OuterVolumeSpecName: "config") pod "b9491fc8-cf87-4bfe-8907-f2362983fcc1" (UID: "b9491fc8-cf87-4bfe-8907-f2362983fcc1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.701480 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9491fc8-cf87-4bfe-8907-f2362983fcc1" (UID: "b9491fc8-cf87-4bfe-8907-f2362983fcc1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.730068 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b9491fc8-cf87-4bfe-8907-f2362983fcc1" (UID: "b9491fc8-cf87-4bfe-8907-f2362983fcc1"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.734101 4672 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.734126 4672 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.734139 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.734149 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgqk8\" (UniqueName: \"kubernetes.io/projected/b9491fc8-cf87-4bfe-8907-f2362983fcc1-kube-api-access-tgqk8\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:40 crc kubenswrapper[4672]: I1007 15:09:40.734159 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9491fc8-cf87-4bfe-8907-f2362983fcc1-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.215390 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-544fc9bf6b-wfkjb" event={"ID":"b9491fc8-cf87-4bfe-8907-f2362983fcc1","Type":"ContainerDied","Data":"21515c4b1387ae95a73b5adda9417683b5d6da71adbf60b9bf4abb248dd9ab3e"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.215455 4672 scope.go:117] "RemoveContainer" containerID="1894479f002df42e43f5b0e8adee0de7c1757ea5446835ee3137a6c3a96f62e7" Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.215642 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-544fc9bf6b-wfkjb" Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.230463 4672 generic.go:334] "Generic (PLEG): container finished" podID="bc280071-0df1-46dd-8d72-c48efe297e6c" containerID="0f069ee60985b3831f377ec71cfc25ca7d4608545cd43bb174713b71cf0108f7" exitCode=0 Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.230548 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-b437-account-create-pvlh9" event={"ID":"bc280071-0df1-46dd-8d72-c48efe297e6c","Type":"ContainerDied","Data":"0f069ee60985b3831f377ec71cfc25ca7d4608545cd43bb174713b71cf0108f7"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.247679 4672 generic.go:334] "Generic (PLEG): container finished" podID="1e1ba3cb-b673-4203-8790-2561949aa72f" containerID="f39292180daf5766d19b71f5347c6d0fd06fbbc9e90f22bd63326b920d279326" exitCode=0 Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.247783 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" event={"ID":"1e1ba3cb-b673-4203-8790-2561949aa72f","Type":"ContainerDied","Data":"f39292180daf5766d19b71f5347c6d0fd06fbbc9e90f22bd63326b920d279326"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.259965 4672 scope.go:117] "RemoveContainer" containerID="5a702e8a30c015fddceb118b2c7dba9af0ed3ff6ca48e53f2afd284434aa4316" Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283448 4672 generic.go:334] "Generic (PLEG): container finished" podID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerID="5407b9a6b501a2e247816e613344936f8e45cd5c30624c082e3af5902bf756b8" exitCode=0 Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283517 4672 generic.go:334] "Generic (PLEG): container finished" podID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerID="6ec4b4e568f4fb0d2c37640c1aeb7fb978a936312f2e72c53577af7cf8bb39a6" exitCode=2 Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283525 4672 generic.go:334] "Generic (PLEG): container finished" podID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerID="9771fb9a1645c29b223683ad803761922b56903de431739534be10627b8efb41" exitCode=0 Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283560 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerDied","Data":"5407b9a6b501a2e247816e613344936f8e45cd5c30624c082e3af5902bf756b8"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283600 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerDied","Data":"6ec4b4e568f4fb0d2c37640c1aeb7fb978a936312f2e72c53577af7cf8bb39a6"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.283614 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerDied","Data":"9771fb9a1645c29b223683ad803761922b56903de431739534be10627b8efb41"} Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.320127 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.349284 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-544fc9bf6b-wfkjb"] Oct 07 15:09:41 crc kubenswrapper[4672]: I1007 15:09:41.908308 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" 
path="/var/lib/kubelet/pods/b9491fc8-cf87-4bfe-8907-f2362983fcc1/volumes" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.295282 4672 generic.go:334] "Generic (PLEG): container finished" podID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerID="eba5ca58caf26d0b6293aa0888dd7ca75f6f8af626653ac40617f92a9fca5862" exitCode=0 Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.295354 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerDied","Data":"eba5ca58caf26d0b6293aa0888dd7ca75f6f8af626653ac40617f92a9fca5862"} Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.295687 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0bd7d3a3-a711-47e1-b105-9865629b64a2","Type":"ContainerDied","Data":"6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e"} Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.295705 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6265daf3f237d7d71ff4cb38763934658c18a67ae323cee6faadf8d72071a42e" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.306497 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.376941 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2gc9\" (UniqueName: \"kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377303 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377452 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377487 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377558 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377658 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.377717 4672 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts\") pod \"0bd7d3a3-a711-47e1-b105-9865629b64a2\" (UID: \"0bd7d3a3-a711-47e1-b105-9865629b64a2\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.378544 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.379400 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.385596 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts" (OuterVolumeSpecName: "scripts") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.412708 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9" (OuterVolumeSpecName: "kube-api-access-x2gc9") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "kube-api-access-x2gc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.419094 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.489325 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.489363 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0bd7d3a3-a711-47e1-b105-9865629b64a2-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.489375 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.489386 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2gc9\" (UniqueName: \"kubernetes.io/projected/0bd7d3a3-a711-47e1-b105-9865629b64a2-kube-api-access-x2gc9\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.489400 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.511960 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.532646 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data" (OuterVolumeSpecName: "config-data") pod "0bd7d3a3-a711-47e1-b105-9865629b64a2" (UID: "0bd7d3a3-a711-47e1-b105-9865629b64a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.591696 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.591755 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd7d3a3-a711-47e1-b105-9865629b64a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.734421 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.762054 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.787784 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.896986 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnr5l\" (UniqueName: \"kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l\") pod \"bc280071-0df1-46dd-8d72-c48efe297e6c\" (UID: \"bc280071-0df1-46dd-8d72-c48efe297e6c\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.897101 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl6gq\" (UniqueName: \"kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq\") pod \"1e1ba3cb-b673-4203-8790-2561949aa72f\" (UID: \"1e1ba3cb-b673-4203-8790-2561949aa72f\") " Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.902258 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq" (OuterVolumeSpecName: "kube-api-access-dl6gq") pod "1e1ba3cb-b673-4203-8790-2561949aa72f" (UID: "1e1ba3cb-b673-4203-8790-2561949aa72f"). InnerVolumeSpecName "kube-api-access-dl6gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.904171 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l" (OuterVolumeSpecName: "kube-api-access-gnr5l") pod "bc280071-0df1-46dd-8d72-c48efe297e6c" (UID: "bc280071-0df1-46dd-8d72-c48efe297e6c"). InnerVolumeSpecName "kube-api-access-gnr5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.999100 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnr5l\" (UniqueName: \"kubernetes.io/projected/bc280071-0df1-46dd-8d72-c48efe297e6c-kube-api-access-gnr5l\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:42 crc kubenswrapper[4672]: I1007 15:09:42.999129 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl6gq\" (UniqueName: \"kubernetes.io/projected/1e1ba3cb-b673-4203-8790-2561949aa72f-kube-api-access-dl6gq\") on node \"crc\" DevicePath \"\"" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.308629 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-b437-account-create-pvlh9" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.309646 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-b437-account-create-pvlh9" event={"ID":"bc280071-0df1-46dd-8d72-c48efe297e6c","Type":"ContainerDied","Data":"219d30230a25290fce239b796846059ea4b2f80388ecd8b43249f9742560f7c9"} Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.309789 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="219d30230a25290fce239b796846059ea4b2f80388ecd8b43249f9742560f7c9" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.311000 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.310998 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cf8d-account-create-lfxbd" event={"ID":"1e1ba3cb-b673-4203-8790-2561949aa72f","Type":"ContainerDied","Data":"abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d"} Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.311211 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abe7dca820e319f7049df78ee43dbd90168ec8a975a7b6932da79c61bc6f4d5d" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.311047 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.354975 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.366251 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.377963 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378418 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378435 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378458 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc280071-0df1-46dd-8d72-c48efe297e6c" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378465 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc280071-0df1-46dd-8d72-c48efe297e6c" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378521 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-notification-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378531 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-notification-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378542 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-api" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378551 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-api" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378571 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="proxy-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378579 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="proxy-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378594 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1ba3cb-b673-4203-8790-2561949aa72f" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378601 4672 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="1e1ba3cb-b673-4203-8790-2561949aa72f" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378611 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="sg-core" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378618 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="sg-core" Oct 07 15:09:43 crc kubenswrapper[4672]: E1007 15:09:43.378629 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-central-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378637 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-central-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378860 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="sg-core" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378876 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="proxy-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378891 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e1ba3cb-b673-4203-8790-2561949aa72f" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378909 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-notification-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378918 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" containerName="ceilometer-central-agent" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378933 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-httpd" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378945 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc280071-0df1-46dd-8d72-c48efe297e6c" containerName="mariadb-account-create" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.378957 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9491fc8-cf87-4bfe-8907-f2362983fcc1" containerName="neutron-api" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.381543 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.384283 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.386743 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.391056 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506411 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcmwq\" (UniqueName: \"kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506469 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506543 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506742 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506783 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.506941 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.507153 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.608890 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.608963 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.608994 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609062 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609091 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609140 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcmwq\" (UniqueName: \"kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609167 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609860 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.609923 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.613182 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.613643 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.614502 4672 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.624987 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.627575 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcmwq\" (UniqueName: \"kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq\") pod \"ceilometer-0\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") " pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.703408 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:09:43 crc kubenswrapper[4672]: I1007 15:09:43.910375 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bd7d3a3-a711-47e1-b105-9865629b64a2" path="/var/lib/kubelet/pods/0bd7d3a3-a711-47e1-b105-9865629b64a2/volumes" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.148479 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-pz2ts"] Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.150138 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.152618 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.152764 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.153664 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ncgzw" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.167684 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-pz2ts"] Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.204874 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.229937 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.230274 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.230382 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdtw5\" 
(UniqueName: \"kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.230493 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.322708 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerStarted","Data":"0122c3b42d2f38862c5cce87b4b08b3a6c34f1878e3f642c521e7dc6e8f9733f"} Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.332463 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.332533 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdtw5\" (UniqueName: \"kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.332570 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.332727 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.338967 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.339985 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.344721 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.355062 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdtw5\" (UniqueName: \"kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5\") pod \"nova-cell0-conductor-db-sync-pz2ts\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:44 crc kubenswrapper[4672]: I1007 15:09:44.473763 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:09:45 crc kubenswrapper[4672]: I1007 15:09:45.049801 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-pz2ts"] Oct 07 15:09:45 crc kubenswrapper[4672]: W1007 15:09:45.060851 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a5bc8d8_deb5_46de_a6f1_c90f36e71104.slice/crio-91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e WatchSource:0}: Error finding container 91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e: Status 404 returned error can't find the container with id 91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e Oct 07 15:09:45 crc kubenswrapper[4672]: I1007 15:09:45.333775 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" event={"ID":"2a5bc8d8-deb5-46de-a6f1-c90f36e71104","Type":"ContainerStarted","Data":"91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e"} Oct 07 15:09:45 crc kubenswrapper[4672]: I1007 15:09:45.336144 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerStarted","Data":"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"} Oct 07 15:09:45 crc kubenswrapper[4672]: I1007 15:09:45.336175 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerStarted","Data":"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"} Oct 07 15:09:46 crc kubenswrapper[4672]: I1007 15:09:46.349943 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerStarted","Data":"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"} Oct 07 15:09:52 crc kubenswrapper[4672]: I1007 15:09:52.401372 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerStarted","Data":"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"} Oct 07 15:09:52 crc kubenswrapper[4672]: I1007 15:09:52.401853 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:09:52 crc kubenswrapper[4672]: I1007 15:09:52.404962 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" event={"ID":"2a5bc8d8-deb5-46de-a6f1-c90f36e71104","Type":"ContainerStarted","Data":"2007561754c1ecf46513b00e33e6a46c5635c673725dd1dfd0924024b25956a3"} Oct 07 15:09:52 crc 
kubenswrapper[4672]: I1007 15:09:52.430560 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.538754359 podStartE2EDuration="9.430542807s" podCreationTimestamp="2025-10-07 15:09:43 +0000 UTC" firstStartedPulling="2025-10-07 15:09:44.212425935 +0000 UTC m=+1261.187604516" lastFinishedPulling="2025-10-07 15:09:52.104214383 +0000 UTC m=+1269.079392964" observedRunningTime="2025-10-07 15:09:52.42307537 +0000 UTC m=+1269.398253951" watchObservedRunningTime="2025-10-07 15:09:52.430542807 +0000 UTC m=+1269.405721388" Oct 07 15:09:52 crc kubenswrapper[4672]: I1007 15:09:52.445740 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" podStartSLOduration=1.403381433 podStartE2EDuration="8.445717248s" podCreationTimestamp="2025-10-07 15:09:44 +0000 UTC" firstStartedPulling="2025-10-07 15:09:45.063713531 +0000 UTC m=+1262.038892112" lastFinishedPulling="2025-10-07 15:09:52.106049346 +0000 UTC m=+1269.081227927" observedRunningTime="2025-10-07 15:09:52.439054064 +0000 UTC m=+1269.414232645" watchObservedRunningTime="2025-10-07 15:09:52.445717248 +0000 UTC m=+1269.420895829" Oct 07 15:10:03 crc kubenswrapper[4672]: I1007 15:10:03.504808 4672 generic.go:334] "Generic (PLEG): container finished" podID="2a5bc8d8-deb5-46de-a6f1-c90f36e71104" containerID="2007561754c1ecf46513b00e33e6a46c5635c673725dd1dfd0924024b25956a3" exitCode=0 Oct 07 15:10:03 crc kubenswrapper[4672]: I1007 15:10:03.504930 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" event={"ID":"2a5bc8d8-deb5-46de-a6f1-c90f36e71104","Type":"ContainerDied","Data":"2007561754c1ecf46513b00e33e6a46c5635c673725dd1dfd0924024b25956a3"} Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.854518 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.939908 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle\") pod \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.939966 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdtw5\" (UniqueName: \"kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5\") pod \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.940098 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts\") pod \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.940195 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data\") pod \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\" (UID: \"2a5bc8d8-deb5-46de-a6f1-c90f36e71104\") " Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.945887 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts" (OuterVolumeSpecName: "scripts") pod "2a5bc8d8-deb5-46de-a6f1-c90f36e71104" (UID: "2a5bc8d8-deb5-46de-a6f1-c90f36e71104"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.945934 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5" (OuterVolumeSpecName: "kube-api-access-wdtw5") pod "2a5bc8d8-deb5-46de-a6f1-c90f36e71104" (UID: "2a5bc8d8-deb5-46de-a6f1-c90f36e71104"). InnerVolumeSpecName "kube-api-access-wdtw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.966830 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data" (OuterVolumeSpecName: "config-data") pod "2a5bc8d8-deb5-46de-a6f1-c90f36e71104" (UID: "2a5bc8d8-deb5-46de-a6f1-c90f36e71104"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:04 crc kubenswrapper[4672]: I1007 15:10:04.969202 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a5bc8d8-deb5-46de-a6f1-c90f36e71104" (UID: "2a5bc8d8-deb5-46de-a6f1-c90f36e71104"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.042781 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.043143 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.043187 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.043200 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdtw5\" (UniqueName: \"kubernetes.io/projected/2a5bc8d8-deb5-46de-a6f1-c90f36e71104-kube-api-access-wdtw5\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.532838 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" event={"ID":"2a5bc8d8-deb5-46de-a6f1-c90f36e71104","Type":"ContainerDied","Data":"91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e"} Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.533132 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91f8c53d3c9adf648b6214c2b60b00ffec87c566291ecdcdbb5b4e0e8e124a8e" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.533037 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-pz2ts" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.629190 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 15:10:05 crc kubenswrapper[4672]: E1007 15:10:05.629589 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a5bc8d8-deb5-46de-a6f1-c90f36e71104" containerName="nova-cell0-conductor-db-sync" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.629609 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a5bc8d8-deb5-46de-a6f1-c90f36e71104" containerName="nova-cell0-conductor-db-sync" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.629803 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a5bc8d8-deb5-46de-a6f1-c90f36e71104" containerName="nova-cell0-conductor-db-sync" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.630406 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.634591 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ncgzw" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.634858 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.645962 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.754089 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.754210 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx29k\" (UniqueName: \"kubernetes.io/projected/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-kube-api-access-gx29k\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.754247 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.855971 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx29k\" (UniqueName: \"kubernetes.io/projected/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-kube-api-access-gx29k\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.856043 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.856115 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.859949 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.860152 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.872606 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx29k\" (UniqueName: \"kubernetes.io/projected/34190ed8-8a54-40ab-b9d4-85f48bc24ee9-kube-api-access-gx29k\") pod \"nova-cell0-conductor-0\" (UID: \"34190ed8-8a54-40ab-b9d4-85f48bc24ee9\") " pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:05 crc kubenswrapper[4672]: I1007 15:10:05.958858 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:06 crc kubenswrapper[4672]: I1007 15:10:06.382686 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 15:10:06 crc kubenswrapper[4672]: I1007 15:10:06.542138 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"34190ed8-8a54-40ab-b9d4-85f48bc24ee9","Type":"ContainerStarted","Data":"44de05ff61adeb3a502a2ae7f1da95cb43434f110b6b979f45b1c4b5931541c5"} Oct 07 15:10:06 crc kubenswrapper[4672]: I1007 15:10:06.542415 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"34190ed8-8a54-40ab-b9d4-85f48bc24ee9","Type":"ContainerStarted","Data":"a318e71353200ed06682ae34f3d854431d3396f88f83c8768b635a4cd4cb15fe"} Oct 07 15:10:06 crc kubenswrapper[4672]: I1007 15:10:06.542617 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:06 crc kubenswrapper[4672]: I1007 15:10:06.564478 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.56445764 podStartE2EDuration="1.56445764s" podCreationTimestamp="2025-10-07 15:10:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:06.554048148 +0000 UTC m=+1283.529226749" watchObservedRunningTime="2025-10-07 15:10:06.56445764 +0000 UTC m=+1283.539636221" Oct 07 15:10:13 crc kubenswrapper[4672]: I1007 15:10:13.708881 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 15:10:15 crc kubenswrapper[4672]: I1007 15:10:15.987445 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.519859 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-fc94m"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.521622 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.523713 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.528104 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.531567 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fc94m"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.636265 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.636316 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.636344 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.636545 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jvkc\" (UniqueName: \"kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.679874 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.681519 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.686400 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.707078 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.738895 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.738950 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvz69\" (UniqueName: \"kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.738988 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.739044 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.739064 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.739085 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.739243 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jvkc\" (UniqueName: \"kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.754878 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.755208 4672 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.756539 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.761083 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.767377 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.767410 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.771569 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jvkc\" (UniqueName: \"kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.780855 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fc94m\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.829579 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.831120 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.834507 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840240 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840504 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvz69\" (UniqueName: \"kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840570 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840636 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840660 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7tk6\" (UniqueName: \"kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840728 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.840769 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.842664 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.846496 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.847516 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.880452 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvz69\" (UniqueName: \"kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69\") pod \"nova-scheduler-0\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.898305 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.900678 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.904126 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.914415 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943371 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943423 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7tk6\" (UniqueName: \"kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943464 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943494 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943526 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943566 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943631 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swf4z\" (UniqueName: \"kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943669 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943730 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whw7f\" (UniqueName: \"kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943788 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.943819 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.958113 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.970481 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:16 crc kubenswrapper[4672]: I1007 15:10:16.992058 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7tk6\" (UniqueName: \"kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6\") pod \"nova-cell1-novncproxy-0\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.003425 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.004311 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.024814 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.026420 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.035618 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.047784 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.047842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.047940 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.047971 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.048004 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.048250 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.048307 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swf4z\" (UniqueName: \"kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.048474 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whw7f\" 
(UniqueName: \"kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.049889 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.053517 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.058854 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.060679 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.067930 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.069277 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whw7f\" (UniqueName: \"kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f\") pod \"nova-api-0\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.069594 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.074215 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swf4z\" (UniqueName: \"kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z\") pod \"nova-metadata-0\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.149897 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.150244 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.150411 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.150851 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.151078 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92d2g\" (UniqueName: \"kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.151235 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.253645 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254239 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254282 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254313 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254577 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254663 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92d2g\" (UniqueName: \"kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.254878 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.255425 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.255480 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.255492 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.255956 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.274575 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92d2g\" (UniqueName: \"kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g\") pod \"dnsmasq-dns-597bd8d975-qqr42\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.325803 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.336796 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.355969 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.574713 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fc94m"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.675517 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.696376 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fc94m" event={"ID":"12eb9041-9bc5-40ec-8d6e-8c2177b8acda","Type":"ContainerStarted","Data":"89e732f742f05a05a116b5d08b3804c147af2816005ec581c69df318727244b5"} Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.702095 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5dl6k"] Oct 07 15:10:17 crc kubenswrapper[4672]: W1007 15:10:17.702781 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b8f8b07_4874_45cd_b30b_d2348e9f65e1.slice/crio-a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8 WatchSource:0}: Error finding container a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8: Status 404 returned error can't find the container with id a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8 Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.707121 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.712102 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.712370 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.733210 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.742559 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5dl6k"] Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.772953 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.773107 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.773260 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc 
kubenswrapper[4672]: I1007 15:10:17.773309 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsgls\" (UniqueName: \"kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.875845 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.875999 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.876056 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsgls\" (UniqueName: \"kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.876113 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.885238 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.897553 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsgls\" (UniqueName: \"kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.898536 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.904850 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data\") pod \"nova-cell1-conductor-db-sync-5dl6k\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " 
pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.937762 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:17 crc kubenswrapper[4672]: W1007 15:10:17.948879 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63526151_0939_4fe1_b985_d7ec0c8d612b.slice/crio-e3bdd74b71e7b941fa6890051030ad7cf781eb0b3673c85a983e648d6839f0a0 WatchSource:0}: Error finding container e3bdd74b71e7b941fa6890051030ad7cf781eb0b3673c85a983e648d6839f0a0: Status 404 returned error can't find the container with id e3bdd74b71e7b941fa6890051030ad7cf781eb0b3673c85a983e648d6839f0a0 Oct 07 15:10:17 crc kubenswrapper[4672]: I1007 15:10:17.954978 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.034719 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.064695 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:10:18 crc kubenswrapper[4672]: W1007 15:10:18.071123 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2796a4c8_8ad4_4f4d_b7c8_11c69815514a.slice/crio-b787118222eb277c1280eecc5e005c02e6e7706c9f55cff672329e3f8edbc74f WatchSource:0}: Error finding container b787118222eb277c1280eecc5e005c02e6e7706c9f55cff672329e3f8edbc74f: Status 404 returned error can't find the container with id b787118222eb277c1280eecc5e005c02e6e7706c9f55cff672329e3f8edbc74f Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.517960 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5dl6k"] Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.707470 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerStarted","Data":"e3bdd74b71e7b941fa6890051030ad7cf781eb0b3673c85a983e648d6839f0a0"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.709575 4672 generic.go:334] "Generic (PLEG): container finished" podID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerID="0012f455e7b087748586b7bf637920fd7c137c3f86b41630f532099b0d00d174" exitCode=0 Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.709665 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" event={"ID":"2796a4c8-8ad4-4f4d-b7c8-11c69815514a","Type":"ContainerDied","Data":"0012f455e7b087748586b7bf637920fd7c137c3f86b41630f532099b0d00d174"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.709698 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" event={"ID":"2796a4c8-8ad4-4f4d-b7c8-11c69815514a","Type":"ContainerStarted","Data":"b787118222eb277c1280eecc5e005c02e6e7706c9f55cff672329e3f8edbc74f"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.711696 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b8f8b07-4874-45cd-b30b-d2348e9f65e1","Type":"ContainerStarted","Data":"a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.715869 4672 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-cell0-cell-mapping-fc94m" event={"ID":"12eb9041-9bc5-40ec-8d6e-8c2177b8acda","Type":"ContainerStarted","Data":"59c0137c856310ab049e75be224dbcee133c57eceaa4a3127dec7b142bf296e1"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.726206 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8cb32636-05d8-40b1-8dd1-d17b3d40dc02","Type":"ContainerStarted","Data":"42515b41ea5fa26dabe3280c51a12f66240e4f2b734f832e2d715b7a69e6b192"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.727527 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerStarted","Data":"b57f56e8fc66f60a8912903b97ff09c010fb1bd622ccdd82f81ab7684789e393"} Oct 07 15:10:18 crc kubenswrapper[4672]: I1007 15:10:18.775870 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-fc94m" podStartSLOduration=2.775848363 podStartE2EDuration="2.775848363s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:18.748862808 +0000 UTC m=+1295.724041399" watchObservedRunningTime="2025-10-07 15:10:18.775848363 +0000 UTC m=+1295.751026944" Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.755806 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerStarted","Data":"b215712ffb49a2e42389528ae2cec1bc44d6d54f52b3488704fc7923f5abcc40"} Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.758737 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerStarted","Data":"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a"} Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.760900 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" event={"ID":"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529","Type":"ContainerStarted","Data":"318d792eaadc0dd492cfcfe638afb2223686e220ff0f1447ec875606ac16874a"} Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.760932 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" event={"ID":"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529","Type":"ContainerStarted","Data":"899cc87e89837f72cb6846ae515f5ff13d59e3f7f303c34bf02ca312fb6c2248"} Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.766248 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.777921 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8cb32636-05d8-40b1-8dd1-d17b3d40dc02","Type":"ContainerStarted","Data":"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b"} Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.784360 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" podStartSLOduration=3.784342765 podStartE2EDuration="3.784342765s" podCreationTimestamp="2025-10-07 15:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 
15:10:20.775944981 +0000 UTC m=+1297.751123572" watchObservedRunningTime="2025-10-07 15:10:20.784342765 +0000 UTC m=+1297.759521346" Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.795516 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" podStartSLOduration=4.79550157 podStartE2EDuration="4.79550157s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:20.793234634 +0000 UTC m=+1297.768413215" watchObservedRunningTime="2025-10-07 15:10:20.79550157 +0000 UTC m=+1297.770680151" Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.812315 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.050040819 podStartE2EDuration="4.812292208s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="2025-10-07 15:10:17.706158485 +0000 UTC m=+1294.681337066" lastFinishedPulling="2025-10-07 15:10:20.468409874 +0000 UTC m=+1297.443588455" observedRunningTime="2025-10-07 15:10:20.80684732 +0000 UTC m=+1297.782025901" watchObservedRunningTime="2025-10-07 15:10:20.812292208 +0000 UTC m=+1297.787470809" Oct 07 15:10:20 crc kubenswrapper[4672]: I1007 15:10:20.830088 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.11574713 podStartE2EDuration="4.830071445s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="2025-10-07 15:10:17.70151947 +0000 UTC m=+1294.676698051" lastFinishedPulling="2025-10-07 15:10:20.415843785 +0000 UTC m=+1297.391022366" observedRunningTime="2025-10-07 15:10:20.822995459 +0000 UTC m=+1297.798174040" watchObservedRunningTime="2025-10-07 15:10:20.830071445 +0000 UTC m=+1297.805250026" Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.497989 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.518848 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.691932 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.692229 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerName="kube-state-metrics" containerID="cri-o://04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b" gracePeriod=30 Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.787262 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerStarted","Data":"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98"} Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.789004 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" event={"ID":"2796a4c8-8ad4-4f4d-b7c8-11c69815514a","Type":"ContainerStarted","Data":"0df6b9cca22acf7ed6ebd4284994c08d93e0bfb73a173003139991d8233f8903"} Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.790830 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"8b8f8b07-4874-45cd-b30b-d2348e9f65e1","Type":"ContainerStarted","Data":"7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc"} Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.793459 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerStarted","Data":"0b334c6caed1540482c291e6dd0bc5df3337d34dfcc3236b231b013e89e690a3"} Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.813999 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.331609213 podStartE2EDuration="5.81398479s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="2025-10-07 15:10:17.95555221 +0000 UTC m=+1294.930730791" lastFinishedPulling="2025-10-07 15:10:20.437927787 +0000 UTC m=+1297.413106368" observedRunningTime="2025-10-07 15:10:21.81192672 +0000 UTC m=+1298.787105301" watchObservedRunningTime="2025-10-07 15:10:21.81398479 +0000 UTC m=+1298.789163371" Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.849255 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.386327224 podStartE2EDuration="5.849238254s" podCreationTimestamp="2025-10-07 15:10:16 +0000 UTC" firstStartedPulling="2025-10-07 15:10:17.951443011 +0000 UTC m=+1294.926621592" lastFinishedPulling="2025-10-07 15:10:20.414354041 +0000 UTC m=+1297.389532622" observedRunningTime="2025-10-07 15:10:21.84734425 +0000 UTC m=+1298.822522831" watchObservedRunningTime="2025-10-07 15:10:21.849238254 +0000 UTC m=+1298.824416835" Oct 07 15:10:21 crc kubenswrapper[4672]: I1007 15:10:21.901958 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.103:8081/readyz\": dial tcp 10.217.0.103:8081: connect: connection refused" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.003544 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.005554 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.305047 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.337439 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.337523 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.385129 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mnvg\" (UniqueName: \"kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg\") pod \"7f89db03-61a3-4391-aa60-fc306eb36c31\" (UID: \"7f89db03-61a3-4391-aa60-fc306eb36c31\") " Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.391948 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg" (OuterVolumeSpecName: "kube-api-access-9mnvg") pod "7f89db03-61a3-4391-aa60-fc306eb36c31" (UID: "7f89db03-61a3-4391-aa60-fc306eb36c31"). InnerVolumeSpecName "kube-api-access-9mnvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.487046 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mnvg\" (UniqueName: \"kubernetes.io/projected/7f89db03-61a3-4391-aa60-fc306eb36c31-kube-api-access-9mnvg\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.803814 4672 generic.go:334] "Generic (PLEG): container finished" podID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerID="04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b" exitCode=2 Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.803875 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.803949 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7f89db03-61a3-4391-aa60-fc306eb36c31","Type":"ContainerDied","Data":"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b"} Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.803980 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7f89db03-61a3-4391-aa60-fc306eb36c31","Type":"ContainerDied","Data":"f0c9e7d3e3c57c3acea9a54114dbeb6cb3c0b2ddd7fe24a3c1b02e62d093f662"} Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.803997 4672 scope.go:117] "RemoveContainer" containerID="04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.804431 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b" gracePeriod=30 Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.805343 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-log" containerID="cri-o://f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" gracePeriod=30 Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.805650 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-metadata" containerID="cri-o://71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" gracePeriod=30 Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.834581 4672 scope.go:117] "RemoveContainer" containerID="04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b" Oct 07 15:10:22 crc kubenswrapper[4672]: E1007 15:10:22.835152 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b\": container with ID starting with 04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b not found: ID does not exist" containerID="04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.835213 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b"} err="failed to get container status \"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b\": rpc error: code = NotFound desc = could not find container \"04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b\": container with ID starting with 04a47a82a07446c0faf59a38a9560e9c6ae2a76ddac2d2f4bbd7f5423630934b not found: ID does not exist" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.856475 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.864547 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.880931 4672 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:22 crc kubenswrapper[4672]: E1007 15:10:22.881436 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerName="kube-state-metrics" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.881459 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerName="kube-state-metrics" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.881710 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" containerName="kube-state-metrics" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.882501 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.886570 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.886652 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.889915 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.996342 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.996822 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ghf2\" (UniqueName: \"kubernetes.io/projected/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-api-access-8ghf2\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.996876 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:22 crc kubenswrapper[4672]: I1007 15:10:22.996941 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.099221 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ghf2\" (UniqueName: \"kubernetes.io/projected/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-api-access-8ghf2\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.099279 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.099318 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.099373 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.107323 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.107436 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.107494 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb79c67-08e6-4bfc-9b12-333500d26d9c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.124307 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ghf2\" (UniqueName: \"kubernetes.io/projected/0eb79c67-08e6-4bfc-9b12-333500d26d9c-kube-api-access-8ghf2\") pod \"kube-state-metrics-0\" (UID: \"0eb79c67-08e6-4bfc-9b12-333500d26d9c\") " pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.202338 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.514923 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.626093 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle\") pod \"63526151-0939-4fe1-b985-d7ec0c8d612b\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.626359 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data\") pod \"63526151-0939-4fe1-b985-d7ec0c8d612b\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.626394 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs\") pod \"63526151-0939-4fe1-b985-d7ec0c8d612b\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.626413 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swf4z\" (UniqueName: \"kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z\") pod \"63526151-0939-4fe1-b985-d7ec0c8d612b\" (UID: \"63526151-0939-4fe1-b985-d7ec0c8d612b\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.628061 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs" (OuterVolumeSpecName: "logs") pod "63526151-0939-4fe1-b985-d7ec0c8d612b" (UID: "63526151-0939-4fe1-b985-d7ec0c8d612b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.629105 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63526151-0939-4fe1-b985-d7ec0c8d612b-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.639584 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z" (OuterVolumeSpecName: "kube-api-access-swf4z") pod "63526151-0939-4fe1-b985-d7ec0c8d612b" (UID: "63526151-0939-4fe1-b985-d7ec0c8d612b"). InnerVolumeSpecName "kube-api-access-swf4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.664552 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data" (OuterVolumeSpecName: "config-data") pod "63526151-0939-4fe1-b985-d7ec0c8d612b" (UID: "63526151-0939-4fe1-b985-d7ec0c8d612b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.667335 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63526151-0939-4fe1-b985-d7ec0c8d612b" (UID: "63526151-0939-4fe1-b985-d7ec0c8d612b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.729361 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.730751 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.730806 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swf4z\" (UniqueName: \"kubernetes.io/projected/63526151-0939-4fe1-b985-d7ec0c8d612b-kube-api-access-swf4z\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.730820 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63526151-0939-4fe1-b985-d7ec0c8d612b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.745577 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.745934 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-central-agent" containerID="cri-o://2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504" gracePeriod=30 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.746111 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-notification-agent" containerID="cri-o://4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c" gracePeriod=30 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.746123 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="sg-core" containerID="cri-o://03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4" gracePeriod=30 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.746297 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="proxy-httpd" containerID="cri-o://2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274" gracePeriod=30 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.769968 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.817765 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0eb79c67-08e6-4bfc-9b12-333500d26d9c","Type":"ContainerStarted","Data":"354f3d9769e5663b717dd11ad7010ed6835d55a71679c00bbd284333637cbda6"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.821301 4672 generic.go:334] "Generic (PLEG): container finished" podID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" containerID="b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b" exitCode=0 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.821431 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"8cb32636-05d8-40b1-8dd1-d17b3d40dc02","Type":"ContainerDied","Data":"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.821465 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8cb32636-05d8-40b1-8dd1-d17b3d40dc02","Type":"ContainerDied","Data":"42515b41ea5fa26dabe3280c51a12f66240e4f2b734f832e2d715b7a69e6b192"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.821488 4672 scope.go:117] "RemoveContainer" containerID="b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.821701 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.824892 4672 generic.go:334] "Generic (PLEG): container finished" podID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerID="71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" exitCode=0 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.824927 4672 generic.go:334] "Generic (PLEG): container finished" podID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerID="f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" exitCode=143 Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.824989 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerDied","Data":"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.825036 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerDied","Data":"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.825049 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63526151-0939-4fe1-b985-d7ec0c8d612b","Type":"ContainerDied","Data":"e3bdd74b71e7b941fa6890051030ad7cf781eb0b3673c85a983e648d6839f0a0"} Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.825121 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.834754 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle\") pod \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.835490 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data\") pod \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.835730 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7tk6\" (UniqueName: \"kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6\") pod \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\" (UID: \"8cb32636-05d8-40b1-8dd1-d17b3d40dc02\") " Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.844802 4672 scope.go:117] "RemoveContainer" containerID="b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.844751 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6" (OuterVolumeSpecName: "kube-api-access-v7tk6") pod "8cb32636-05d8-40b1-8dd1-d17b3d40dc02" (UID: "8cb32636-05d8-40b1-8dd1-d17b3d40dc02"). InnerVolumeSpecName "kube-api-access-v7tk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.851343 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b\": container with ID starting with b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b not found: ID does not exist" containerID="b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.851387 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b"} err="failed to get container status \"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b\": rpc error: code = NotFound desc = could not find container \"b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b\": container with ID starting with b58ad463499389f6e8b1eab5ca74e793bd5420f064bfe00b7884c6b292f2c45b not found: ID does not exist" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.851413 4672 scope.go:117] "RemoveContainer" containerID="71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.863144 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data" (OuterVolumeSpecName: "config-data") pod "8cb32636-05d8-40b1-8dd1-d17b3d40dc02" (UID: "8cb32636-05d8-40b1-8dd1-d17b3d40dc02"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.882895 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cb32636-05d8-40b1-8dd1-d17b3d40dc02" (UID: "8cb32636-05d8-40b1-8dd1-d17b3d40dc02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.906032 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f89db03-61a3-4391-aa60-fc306eb36c31" path="/var/lib/kubelet/pods/7f89db03-61a3-4391-aa60-fc306eb36c31/volumes" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.906232 4672 scope.go:117] "RemoveContainer" containerID="f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.907030 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.921047 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.933275 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.933742 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-metadata" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.933766 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-metadata" Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.933789 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.933798 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.933829 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-log" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.933837 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-log" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.934106 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-log" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.934138 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.934162 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" containerName="nova-metadata-metadata" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.935424 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.939575 4672 scope.go:117] "RemoveContainer" containerID="71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.940577 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.940630 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98\": container with ID starting with 71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98 not found: ID does not exist" containerID="71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.940675 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98"} err="failed to get container status \"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98\": rpc error: code = NotFound desc = could not find container \"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98\": container with ID starting with 71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98 not found: ID does not exist" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.940708 4672 scope.go:117] "RemoveContainer" containerID="f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.940974 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 15:10:23 crc kubenswrapper[4672]: E1007 15:10:23.941549 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a\": container with ID starting with f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a not found: ID does not exist" containerID="f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.941598 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a"} err="failed to get container status \"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a\": rpc error: code = NotFound desc = could not find container \"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a\": container with ID starting with f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a not found: ID does not exist" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.941631 4672 scope.go:117] "RemoveContainer" containerID="71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.941964 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98"} err="failed to get container status \"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98\": rpc error: code = NotFound desc = could not find container \"71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98\": container with ID starting with 
71e713345307a210b5ea3e0702f2bb89a86772970cc029d304cf2eadc8118f98 not found: ID does not exist" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.941995 4672 scope.go:117] "RemoveContainer" containerID="f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.942492 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a"} err="failed to get container status \"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a\": rpc error: code = NotFound desc = could not find container \"f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a\": container with ID starting with f2f83dbf06194fd98c836b2d573ef1b8468d983e4dc9df8ae26c4e7627b3586a not found: ID does not exist" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.945605 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7tk6\" (UniqueName: \"kubernetes.io/projected/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-kube-api-access-v7tk6\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.945659 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.945674 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cb32636-05d8-40b1-8dd1-d17b3d40dc02-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:23 crc kubenswrapper[4672]: I1007 15:10:23.952149 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.048145 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.048221 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.048268 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.048327 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp8sj\" (UniqueName: \"kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.048379 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.146366 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.150005 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.150129 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.150184 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp8sj\" (UniqueName: \"kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.150640 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.150817 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.151224 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.154911 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.155290 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.156712 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.157642 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.170281 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.171802 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.176455 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.176498 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp8sj\" (UniqueName: \"kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj\") pod \"nova-metadata-0\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") " pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.176841 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.177112 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.189414 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.252820 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m6dq\" (UniqueName: \"kubernetes.io/projected/696b0eb7-d73f-4a68-a991-d9af3b74fea2-kube-api-access-6m6dq\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.253088 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.253216 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.253355 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.253476 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.268427 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.355318 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.355591 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.355685 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.355755 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.355907 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m6dq\" (UniqueName: \"kubernetes.io/projected/696b0eb7-d73f-4a68-a991-d9af3b74fea2-kube-api-access-6m6dq\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.363544 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.366209 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.370655 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.370685 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/696b0eb7-d73f-4a68-a991-d9af3b74fea2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.372421 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m6dq\" (UniqueName: \"kubernetes.io/projected/696b0eb7-d73f-4a68-a991-d9af3b74fea2-kube-api-access-6m6dq\") pod \"nova-cell1-novncproxy-0\" (UID: \"696b0eb7-d73f-4a68-a991-d9af3b74fea2\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.451421 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.734574 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.841511 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerStarted","Data":"7bd99659e2e3cd1d06dc255905fb9f45ef3861b13e442a7768a4b5d68166aba0"} Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845418 4672 generic.go:334] "Generic (PLEG): container finished" podID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerID="2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274" exitCode=0 Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845447 4672 generic.go:334] "Generic (PLEG): container finished" podID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerID="03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4" exitCode=2 Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845456 4672 generic.go:334] "Generic (PLEG): container finished" podID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerID="2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504" exitCode=0 Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845477 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerDied","Data":"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"} Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845530 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerDied","Data":"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"} Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.845548 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerDied","Data":"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"} Oct 07 15:10:24 crc kubenswrapper[4672]: I1007 15:10:24.913576 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 15:10:24 crc kubenswrapper[4672]: W1007 15:10:24.915478 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod696b0eb7_d73f_4a68_a991_d9af3b74fea2.slice/crio-df81fa034242066714dc6c4061ddfaad07bfb231987aacfa830794a4b0a082db WatchSource:0}: Error finding container df81fa034242066714dc6c4061ddfaad07bfb231987aacfa830794a4b0a082db: Status 404 returned error can't find the container with id 
df81fa034242066714dc6c4061ddfaad07bfb231987aacfa830794a4b0a082db Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.856005 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerStarted","Data":"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"} Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.856068 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerStarted","Data":"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"} Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.859050 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0eb79c67-08e6-4bfc-9b12-333500d26d9c","Type":"ContainerStarted","Data":"af27acc70e1b1411dc35643ea975934e70c4f9771040aab63ce1e7a56f6529da"} Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.859550 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.861096 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"696b0eb7-d73f-4a68-a991-d9af3b74fea2","Type":"ContainerStarted","Data":"367676ac24f00fd02a69f86e4cde8bc794fedb974ce4463e48f36d39dbdb5416"} Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.861122 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"696b0eb7-d73f-4a68-a991-d9af3b74fea2","Type":"ContainerStarted","Data":"df81fa034242066714dc6c4061ddfaad07bfb231987aacfa830794a4b0a082db"} Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.882202 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8821849410000002 podStartE2EDuration="2.882184941s" podCreationTimestamp="2025-10-07 15:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:25.880925594 +0000 UTC m=+1302.856104195" watchObservedRunningTime="2025-10-07 15:10:25.882184941 +0000 UTC m=+1302.857363512" Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.910253 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63526151-0939-4fe1-b985-d7ec0c8d612b" path="/var/lib/kubelet/pods/63526151-0939-4fe1-b985-d7ec0c8d612b/volumes" Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.910995 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cb32636-05d8-40b1-8dd1-d17b3d40dc02" path="/var/lib/kubelet/pods/8cb32636-05d8-40b1-8dd1-d17b3d40dc02/volumes" Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.923456 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.678833343 podStartE2EDuration="3.923437561s" podCreationTimestamp="2025-10-07 15:10:22 +0000 UTC" firstStartedPulling="2025-10-07 15:10:23.772069354 +0000 UTC m=+1300.747247935" lastFinishedPulling="2025-10-07 15:10:25.016673572 +0000 UTC m=+1301.991852153" observedRunningTime="2025-10-07 15:10:25.906519399 +0000 UTC m=+1302.881698000" watchObservedRunningTime="2025-10-07 15:10:25.923437561 +0000 UTC m=+1302.898616142" Oct 07 15:10:25 crc kubenswrapper[4672]: I1007 15:10:25.936148 4672 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.93613089 podStartE2EDuration="1.93613089s" podCreationTimestamp="2025-10-07 15:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:25.931885977 +0000 UTC m=+1302.907064558" watchObservedRunningTime="2025-10-07 15:10:25.93613089 +0000 UTC m=+1302.911309471" Oct 07 15:10:26 crc kubenswrapper[4672]: I1007 15:10:26.650438 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:10:26 crc kubenswrapper[4672]: I1007 15:10:26.650796 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:10:26 crc kubenswrapper[4672]: I1007 15:10:26.871511 4672 generic.go:334] "Generic (PLEG): container finished" podID="12eb9041-9bc5-40ec-8d6e-8c2177b8acda" containerID="59c0137c856310ab049e75be224dbcee133c57eceaa4a3127dec7b142bf296e1" exitCode=0 Oct 07 15:10:26 crc kubenswrapper[4672]: I1007 15:10:26.871648 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fc94m" event={"ID":"12eb9041-9bc5-40ec-8d6e-8c2177b8acda","Type":"ContainerDied","Data":"59c0137c856310ab049e75be224dbcee133c57eceaa4a3127dec7b142bf296e1"} Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.005993 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.030860 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.326057 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.326169 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.358154 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.415149 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"] Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.415390 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="dnsmasq-dns" containerID="cri-o://33b8d2cf3e62fb61f9dd03cdb1112c5db182a1e9d794889e3694cf9751f13d5e" gracePeriod=10 Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.937603 4672 generic.go:334] "Generic (PLEG): container finished" podID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerID="33b8d2cf3e62fb61f9dd03cdb1112c5db182a1e9d794889e3694cf9751f13d5e" exitCode=0 Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.939145 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" event={"ID":"8e76fa3b-d36e-4700-b046-1ba853b1730a","Type":"ContainerDied","Data":"33b8d2cf3e62fb61f9dd03cdb1112c5db182a1e9d794889e3694cf9751f13d5e"} Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.939192 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" event={"ID":"8e76fa3b-d36e-4700-b046-1ba853b1730a","Type":"ContainerDied","Data":"802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc"} Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.939208 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="802f170ca29eb7b21a8398f574634626be2dfe3e23cc29eb5448986593cd0abc" Oct 07 15:10:27 crc kubenswrapper[4672]: I1007 15:10:27.999326 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.014311 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.143909 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.143973 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5m55\" (UniqueName: \"kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.144085 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.144213 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.144247 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.144301 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config\") pod \"8e76fa3b-d36e-4700-b046-1ba853b1730a\" (UID: \"8e76fa3b-d36e-4700-b046-1ba853b1730a\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.164237 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55" (OuterVolumeSpecName: "kube-api-access-c5m55") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: 
"8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "kube-api-access-c5m55". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.247913 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: "8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.249558 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5m55\" (UniqueName: \"kubernetes.io/projected/8e76fa3b-d36e-4700-b046-1ba853b1730a-kube-api-access-c5m55\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.249592 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.278085 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: "8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.280422 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: "8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.293887 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: "8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.300534 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config" (OuterVolumeSpecName: "config") pod "8e76fa3b-d36e-4700-b046-1ba853b1730a" (UID: "8e76fa3b-d36e-4700-b046-1ba853b1730a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.324258 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fc94m" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.352263 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.352312 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.352326 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.352337 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e76fa3b-d36e-4700-b046-1ba853b1730a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.410169 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.410642 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.454001 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle\") pod \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.454119 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts\") pod \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.454245 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jvkc\" (UniqueName: \"kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc\") pod \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.454291 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data\") pod \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\" (UID: \"12eb9041-9bc5-40ec-8d6e-8c2177b8acda\") " Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.471213 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc" (OuterVolumeSpecName: "kube-api-access-9jvkc") pod 
"12eb9041-9bc5-40ec-8d6e-8c2177b8acda" (UID: "12eb9041-9bc5-40ec-8d6e-8c2177b8acda"). InnerVolumeSpecName "kube-api-access-9jvkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.483202 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts" (OuterVolumeSpecName: "scripts") pod "12eb9041-9bc5-40ec-8d6e-8c2177b8acda" (UID: "12eb9041-9bc5-40ec-8d6e-8c2177b8acda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.483742 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data" (OuterVolumeSpecName: "config-data") pod "12eb9041-9bc5-40ec-8d6e-8c2177b8acda" (UID: "12eb9041-9bc5-40ec-8d6e-8c2177b8acda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.490387 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12eb9041-9bc5-40ec-8d6e-8c2177b8acda" (UID: "12eb9041-9bc5-40ec-8d6e-8c2177b8acda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.556955 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.556987 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jvkc\" (UniqueName: \"kubernetes.io/projected/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-kube-api-access-9jvkc\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.557001 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.557026 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12eb9041-9bc5-40ec-8d6e-8c2177b8acda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.904949 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.952261 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fc94m" event={"ID":"12eb9041-9bc5-40ec-8d6e-8c2177b8acda","Type":"ContainerDied","Data":"89e732f742f05a05a116b5d08b3804c147af2816005ec581c69df318727244b5"} Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.952295 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fc94m"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.952299 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89e732f742f05a05a116b5d08b3804c147af2816005ec581c69df318727244b5"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.956277 4672 generic.go:334] "Generic (PLEG): container finished" podID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerID="4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c" exitCode=0
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.956318 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerDied","Data":"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"}
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.956361 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.956380 4672 scope.go:117] "RemoveContainer" containerID="2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.956370 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a51442c9-d9dc-45ed-85c9-824be83a6d05","Type":"ContainerDied","Data":"0122c3b42d2f38862c5cce87b4b08b3a6c34f1878e3f642c521e7dc6e8f9733f"}
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.959200 4672 generic.go:334] "Generic (PLEG): container finished" podID="7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" containerID="318d792eaadc0dd492cfcfe638afb2223686e220ff0f1447ec875606ac16874a" exitCode=0
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.959288 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd4749fc-v9n9w"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.959301 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" event={"ID":"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529","Type":"ContainerDied","Data":"318d792eaadc0dd492cfcfe638afb2223686e220ff0f1447ec875606ac16874a"}
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.965772 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966207 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966345 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966382 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966437 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966502 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcmwq\" (UniqueName: \"kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.966535 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts\") pod \"a51442c9-d9dc-45ed-85c9-824be83a6d05\" (UID: \"a51442c9-d9dc-45ed-85c9-824be83a6d05\") "
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.976517 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.976699 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.984208 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts" (OuterVolumeSpecName: "scripts") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.985632 4672 scope.go:117] "RemoveContainer" containerID="03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"
Oct 07 15:10:28 crc kubenswrapper[4672]: I1007 15:10:28.996933 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq" (OuterVolumeSpecName: "kube-api-access-jcmwq") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "kube-api-access-jcmwq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.004987 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.071613 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcmwq\" (UniqueName: \"kubernetes.io/projected/a51442c9-d9dc-45ed-85c9-824be83a6d05-kube-api-access-jcmwq\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.071830 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.071840 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.071849 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a51442c9-d9dc-45ed-85c9-824be83a6d05-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.071858 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.087437 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.087723 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-log" containerID="cri-o://b215712ffb49a2e42389528ae2cec1bc44d6d54f52b3488704fc7923f5abcc40" gracePeriod=30
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.088167 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-api" containerID="cri-o://0b334c6caed1540482c291e6dd0bc5df3337d34dfcc3236b231b013e89e690a3" gracePeriod=30
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.099442 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.128070 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.160152 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.162880 4672 scope.go:117] "RemoveContainer" containerID="4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.170228 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78cd4749fc-v9n9w"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.173956 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data" (OuterVolumeSpecName: "config-data") pod "a51442c9-d9dc-45ed-85c9-824be83a6d05" (UID: "a51442c9-d9dc-45ed-85c9-824be83a6d05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.174979 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.175026 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51442c9-d9dc-45ed-85c9-824be83a6d05-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.179322 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.179719 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-log" containerID="cri-o://5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f" gracePeriod=30
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.179964 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-metadata" containerID="cri-o://ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71" gracePeriod=30
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.196996 4672 scope.go:117] "RemoveContainer" containerID="2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.225596 4672 scope.go:117] "RemoveContainer" containerID="2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.226038 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274\": container with ID starting with 2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274 not found: ID does not exist" containerID="2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.226065 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274"} err="failed to get container status \"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274\": rpc error: code = NotFound desc = could not find container \"2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274\": container with ID starting with 2b78fe126353d98f6776b2c5c684c53589b16c113ef56a015561943f70398274 not found: ID does not exist"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.226083 4672 scope.go:117] "RemoveContainer" containerID="03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.226670 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4\": container with ID starting with 03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4 not found: ID does not exist" containerID="03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.226732 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4"} err="failed to get container status \"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4\": rpc error: code = NotFound desc = could not find container \"03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4\": container with ID starting with 03f2a5c6afb26fbc2d0d3eb9f74b7746fedccb85ef934d96bd137297c2363bd4 not found: ID does not exist"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.226761 4672 scope.go:117] "RemoveContainer" containerID="4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.227158 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c\": container with ID starting with 4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c not found: ID does not exist" containerID="4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.227182 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c"} err="failed to get container status \"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c\": rpc error: code = NotFound desc = could not find container \"4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c\": container with ID starting with 4ef97471300c784ca54f3edc208c34790ae5ee15565a561cc3b9bae203cfed1c not found: ID does not exist"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.227196 4672 scope.go:117] "RemoveContainer" containerID="2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.227632 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504\": container with ID starting with 2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504 not found: ID does not exist" containerID="2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.227671 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504"} err="failed to get container status \"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504\": rpc error: code = NotFound desc = could not find container \"2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504\": container with ID starting with 2e18a74e3541c9d0343fac2a94fcbaafd73890fbf1474fb7160100b3b7f59504 not found: ID does not exist"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.269157 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.269330 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.290115 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.300613 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316427 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.316802 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-notification-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316820 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-notification-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.316841 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="sg-core"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316847 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="sg-core"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.316861 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-central-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316867 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-central-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.316877 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="dnsmasq-dns"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316882 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="dnsmasq-dns"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.316892 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12eb9041-9bc5-40ec-8d6e-8c2177b8acda" containerName="nova-manage"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.316897 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="12eb9041-9bc5-40ec-8d6e-8c2177b8acda" containerName="nova-manage"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.317776 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="init"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.317789 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="init"
Oct 07 15:10:29 crc kubenswrapper[4672]: E1007 15:10:29.317821 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="proxy-httpd"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.317827 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="proxy-httpd"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318000 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-central-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318033 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="ceilometer-notification-agent"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318053 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" containerName="dnsmasq-dns"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318070 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="sg-core"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318083 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="12eb9041-9bc5-40ec-8d6e-8c2177b8acda" containerName="nova-manage"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.318094 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" containerName="proxy-httpd"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.319857 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.328810 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.336780 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.336910 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.340007 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.451923 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480291 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480365 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480411 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480453 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480511 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jgwk\" (UniqueName: \"kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480539 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480580 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.480722 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.582760 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.582833 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.582893 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jgwk\" (UniqueName: \"kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.582922 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.582967 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.583109 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.583213 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.583270 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.583584 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.583802 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.590110 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.591006 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.594914 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.604746 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.615576 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jgwk\" (UniqueName: \"kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.622305 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data\") pod \"ceilometer-0\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.681087 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.839698 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.906310 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e76fa3b-d36e-4700-b046-1ba853b1730a" path="/var/lib/kubelet/pods/8e76fa3b-d36e-4700-b046-1ba853b1730a/volumes"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.907141 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a51442c9-d9dc-45ed-85c9-824be83a6d05" path="/var/lib/kubelet/pods/a51442c9-d9dc-45ed-85c9-824be83a6d05/volumes"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978188 4672 generic.go:334] "Generic (PLEG): container finished" podID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerID="ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71" exitCode=0
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978241 4672 generic.go:334] "Generic (PLEG): container finished" podID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerID="5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f" exitCode=143
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978291 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerDied","Data":"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"}
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978321 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerDied","Data":"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"}
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978332 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad","Type":"ContainerDied","Data":"7bd99659e2e3cd1d06dc255905fb9f45ef3861b13e442a7768a4b5d68166aba0"}
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978350 4672 scope.go:117] "RemoveContainer" containerID="ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.978469 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.987822 4672 generic.go:334] "Generic (PLEG): container finished" podID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerID="b215712ffb49a2e42389528ae2cec1bc44d6d54f52b3488704fc7923f5abcc40" exitCode=143
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.988069 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerDied","Data":"b215712ffb49a2e42389528ae2cec1bc44d6d54f52b3488704fc7923f5abcc40"}
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.988321 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerName="nova-scheduler-scheduler" containerID="cri-o://7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" gracePeriod=30
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992462 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs\") pod \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") "
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992511 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp8sj\" (UniqueName: \"kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj\") pod \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") "
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992649 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle\") pod \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") "
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992738 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs\") pod \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") "
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992772 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data\") pod \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\" (UID: \"4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad\") "
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.992854 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs" (OuterVolumeSpecName: "logs") pod "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" (UID: "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:10:29 crc kubenswrapper[4672]: I1007 15:10:29.993547 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-logs\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.000359 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj" (OuterVolumeSpecName: "kube-api-access-rp8sj") pod "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" (UID: "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad"). InnerVolumeSpecName "kube-api-access-rp8sj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.016250 4672 scope.go:117] "RemoveContainer" containerID="5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.022433 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data" (OuterVolumeSpecName: "config-data") pod "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" (UID: "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.030155 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" (UID: "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.061894 4672 scope.go:117] "RemoveContainer" containerID="ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"
Oct 07 15:10:30 crc kubenswrapper[4672]: E1007 15:10:30.063822 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71\": container with ID starting with ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71 not found: ID does not exist" containerID="ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.063881 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"} err="failed to get container status \"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71\": rpc error: code = NotFound desc = could not find container \"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71\": container with ID starting with ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71 not found: ID does not exist"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.063915 4672 scope.go:117] "RemoveContainer" containerID="5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"
Oct 07 15:10:30 crc kubenswrapper[4672]: E1007 15:10:30.065180 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f\": container with ID starting with 5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f not found: ID does not exist" containerID="5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.065205 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"} err="failed to get container status \"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f\": rpc error: code = NotFound desc = could not find container \"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f\": container with ID starting with 5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f not found: ID does not exist"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.065234 4672 scope.go:117] "RemoveContainer" containerID="ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.065207 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" (UID: "4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.065528 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71"} err="failed to get container status \"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71\": rpc error: code = NotFound desc = could not find container \"ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71\": container with ID starting with ce34935a3d3d3df891e1fda64a82677c433a568fc1ae080448f3459a33268d71 not found: ID does not exist"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.065566 4672 scope.go:117] "RemoveContainer" containerID="5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.066627 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f"} err="failed to get container status \"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f\": rpc error: code = NotFound desc = could not find container \"5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f\": container with ID starting with 5ec324b7edef4aacdffa89697ec48e9c4442abe8e65a40446336b237b83d6a5f not found: ID does not exist"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.095287 4672 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.095320 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.095330 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp8sj\" (UniqueName: \"kubernetes.io/projected/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-kube-api-access-rp8sj\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.095339 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.158883 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 07 15:10:30 crc kubenswrapper[4672]: W1007 15:10:30.163354 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90397fae_df43_427c_b2f8_af7a7d8ebb68.slice/crio-97ee226486060ef683a5ed59a385643e02cdd0d0261e6550f46bb4f1ad7d3531 WatchSource:0}: Error finding container 97ee226486060ef683a5ed59a385643e02cdd0d0261e6550f46bb4f1ad7d3531: Status 404 returned error can't find the container with id 97ee226486060ef683a5ed59a385643e02cdd0d0261e6550f46bb4f1ad7d3531
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.324774 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.340308 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5dl6k"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.342607 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.356534 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 15:10:30 crc kubenswrapper[4672]: E1007 15:10:30.356960 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-metadata"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.356971 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-metadata"
Oct 07 15:10:30 crc kubenswrapper[4672]: E1007 15:10:30.356984 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-log"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.356990 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-log"
Oct 07 15:10:30 crc kubenswrapper[4672]: E1007 15:10:30.357032 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" containerName="nova-cell1-conductor-db-sync"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.357042 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" containerName="nova-cell1-conductor-db-sync"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.357266 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-metadata"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.357284 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" containerName="nova-cell1-conductor-db-sync"
Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.357296 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-log"
podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" containerName="nova-metadata-log" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.358312 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.360186 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.360452 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.394061 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.502702 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsgls\" (UniqueName: \"kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls\") pod \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503028 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle\") pod \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503108 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data\") pod \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503134 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts\") pod \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\" (UID: \"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529\") " Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503417 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503473 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v62td\" (UniqueName: \"kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503501 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503569 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.503694 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.508369 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls" (OuterVolumeSpecName: "kube-api-access-lsgls") pod "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" (UID: "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529"). InnerVolumeSpecName "kube-api-access-lsgls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.508463 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts" (OuterVolumeSpecName: "scripts") pod "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" (UID: "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.536545 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data" (OuterVolumeSpecName: "config-data") pod "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" (UID: "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.536809 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" (UID: "7e50c0a4-dded-4ae4-9b1d-a3e06c43c529"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.607727 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.607926 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.607993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608061 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v62td\" (UniqueName: \"kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608092 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608173 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608195 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608209 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsgls\" (UniqueName: \"kubernetes.io/projected/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-kube-api-access-lsgls\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608224 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.608762 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.611552 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data\") pod \"nova-metadata-0\" (UID: 
\"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.612225 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.612293 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.630651 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v62td\" (UniqueName: \"kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td\") pod \"nova-metadata-0\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " pod="openstack/nova-metadata-0" Oct 07 15:10:30 crc kubenswrapper[4672]: I1007 15:10:30.714130 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.025607 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerStarted","Data":"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c"} Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.025658 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerStarted","Data":"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265"} Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.025669 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerStarted","Data":"97ee226486060ef683a5ed59a385643e02cdd0d0261e6550f46bb4f1ad7d3531"} Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.027861 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" event={"ID":"7e50c0a4-dded-4ae4-9b1d-a3e06c43c529","Type":"ContainerDied","Data":"899cc87e89837f72cb6846ae515f5ff13d59e3f7f303c34bf02ca312fb6c2248"} Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.027900 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="899cc87e89837f72cb6846ae515f5ff13d59e3f7f303c34bf02ca312fb6c2248" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.027958 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5dl6k" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.066406 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.068596 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.071298 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.110403 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.117823 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvcq8\" (UniqueName: \"kubernetes.io/projected/38be140b-8203-4f2a-8215-6c76db50244e-kube-api-access-mvcq8\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.117880 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.118009 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.183171 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:31 crc kubenswrapper[4672]: W1007 15:10:31.185738 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod112df96d_519d_4989_a9d2_1bbb1c5f3496.slice/crio-6c1b619c1220efbf7dba8ba1cff1eed64a219261cce1d52302a862b942dc1fb0 WatchSource:0}: Error finding container 6c1b619c1220efbf7dba8ba1cff1eed64a219261cce1d52302a862b942dc1fb0: Status 404 returned error can't find the container with id 6c1b619c1220efbf7dba8ba1cff1eed64a219261cce1d52302a862b942dc1fb0 Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.219837 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvcq8\" (UniqueName: \"kubernetes.io/projected/38be140b-8203-4f2a-8215-6c76db50244e-kube-api-access-mvcq8\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.219896 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.219988 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.225503 4672 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.227463 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38be140b-8203-4f2a-8215-6c76db50244e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.242120 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvcq8\" (UniqueName: \"kubernetes.io/projected/38be140b-8203-4f2a-8215-6c76db50244e-kube-api-access-mvcq8\") pod \"nova-cell1-conductor-0\" (UID: \"38be140b-8203-4f2a-8215-6c76db50244e\") " pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.421091 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.865373 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 15:10:31 crc kubenswrapper[4672]: W1007 15:10:31.868273 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38be140b_8203_4f2a_8215_6c76db50244e.slice/crio-e97f1fae6e2eb2ebc4d0f3b98c32146a63d8638d0a43bd66862d5874446d7d33 WatchSource:0}: Error finding container e97f1fae6e2eb2ebc4d0f3b98c32146a63d8638d0a43bd66862d5874446d7d33: Status 404 returned error can't find the container with id e97f1fae6e2eb2ebc4d0f3b98c32146a63d8638d0a43bd66862d5874446d7d33 Oct 07 15:10:31 crc kubenswrapper[4672]: I1007 15:10:31.926448 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad" path="/var/lib/kubelet/pods/4caa91d5-bd56-47c6-aaf7-0cc1b3b1c5ad/volumes" Oct 07 15:10:32 crc kubenswrapper[4672]: E1007 15:10:32.007658 4672 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 15:10:32 crc kubenswrapper[4672]: E1007 15:10:32.011735 4672 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 15:10:32 crc kubenswrapper[4672]: E1007 15:10:32.012958 4672 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 15:10:32 crc kubenswrapper[4672]: E1007 15:10:32.012996 4672 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerName="nova-scheduler-scheduler" Oct 07 15:10:32 crc kubenswrapper[4672]: I1007 15:10:32.038658 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"38be140b-8203-4f2a-8215-6c76db50244e","Type":"ContainerStarted","Data":"e97f1fae6e2eb2ebc4d0f3b98c32146a63d8638d0a43bd66862d5874446d7d33"} Oct 07 15:10:32 crc kubenswrapper[4672]: I1007 15:10:32.041173 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerStarted","Data":"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012"} Oct 07 15:10:32 crc kubenswrapper[4672]: I1007 15:10:32.041221 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerStarted","Data":"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb"} Oct 07 15:10:32 crc kubenswrapper[4672]: I1007 15:10:32.041232 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerStarted","Data":"6c1b619c1220efbf7dba8ba1cff1eed64a219261cce1d52302a862b942dc1fb0"} Oct 07 15:10:32 crc kubenswrapper[4672]: I1007 15:10:32.062220 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.0622011 podStartE2EDuration="2.0622011s" podCreationTimestamp="2025-10-07 15:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:32.057900605 +0000 UTC m=+1309.033079206" watchObservedRunningTime="2025-10-07 15:10:32.0622011 +0000 UTC m=+1309.037379681" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.049947 4672 generic.go:334] "Generic (PLEG): container finished" podID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerID="7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" exitCode=0 Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.050124 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b8f8b07-4874-45cd-b30b-d2348e9f65e1","Type":"ContainerDied","Data":"7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc"} Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.050428 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b8f8b07-4874-45cd-b30b-d2348e9f65e1","Type":"ContainerDied","Data":"a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8"} Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.050442 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8804be1d81eabdc2495fe826eb92dfc6e87c83334bd48c2017ca1561d83adb8" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.052637 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerStarted","Data":"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958"} Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.055348 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" 
event={"ID":"38be140b-8203-4f2a-8215-6c76db50244e","Type":"ContainerStarted","Data":"0e13a0b755272fdce93721334a285bc4d21805b9d8dbc43eeeb36969fa6aed7f"} Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.055380 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.073287 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.073270364 podStartE2EDuration="2.073270364s" podCreationTimestamp="2025-10-07 15:10:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:33.067590928 +0000 UTC m=+1310.042769539" watchObservedRunningTime="2025-10-07 15:10:33.073270364 +0000 UTC m=+1310.048448945" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.102412 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.167066 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvz69\" (UniqueName: \"kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69\") pod \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.167137 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data\") pod \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.167182 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle\") pod \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\" (UID: \"8b8f8b07-4874-45cd-b30b-d2348e9f65e1\") " Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.178298 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69" (OuterVolumeSpecName: "kube-api-access-dvz69") pod "8b8f8b07-4874-45cd-b30b-d2348e9f65e1" (UID: "8b8f8b07-4874-45cd-b30b-d2348e9f65e1"). InnerVolumeSpecName "kube-api-access-dvz69". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.197129 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data" (OuterVolumeSpecName: "config-data") pod "8b8f8b07-4874-45cd-b30b-d2348e9f65e1" (UID: "8b8f8b07-4874-45cd-b30b-d2348e9f65e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.201283 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b8f8b07-4874-45cd-b30b-d2348e9f65e1" (UID: "8b8f8b07-4874-45cd-b30b-d2348e9f65e1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.217919 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.269170 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvz69\" (UniqueName: \"kubernetes.io/projected/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-kube-api-access-dvz69\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.269206 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:33 crc kubenswrapper[4672]: I1007 15:10:33.269218 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8f8b07-4874-45cd-b30b-d2348e9f65e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.068515 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.068754 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerStarted","Data":"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e"} Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.069872 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.110354 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.808754255 podStartE2EDuration="5.110316524s" podCreationTimestamp="2025-10-07 15:10:29 +0000 UTC" firstStartedPulling="2025-10-07 15:10:30.171879256 +0000 UTC m=+1307.147057837" lastFinishedPulling="2025-10-07 15:10:33.473441525 +0000 UTC m=+1310.448620106" observedRunningTime="2025-10-07 15:10:34.098135889 +0000 UTC m=+1311.073314480" watchObservedRunningTime="2025-10-07 15:10:34.110316524 +0000 UTC m=+1311.085495105" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.119616 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.130751 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.140450 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:34 crc kubenswrapper[4672]: E1007 15:10:34.140915 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerName="nova-scheduler-scheduler" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.140945 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerName="nova-scheduler-scheduler" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.141304 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" containerName="nova-scheduler-scheduler" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.142064 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.149724 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.151764 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.186793 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.187256 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krqvd\" (UniqueName: \"kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.187796 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.289830 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.290198 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.290394 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krqvd\" (UniqueName: \"kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.295643 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.299834 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.309090 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krqvd\" (UniqueName: 
\"kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd\") pod \"nova-scheduler-0\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.452514 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.468130 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.482087 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:34 crc kubenswrapper[4672]: I1007 15:10:34.966320 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.081338 4672 generic.go:334] "Generic (PLEG): container finished" podID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerID="0b334c6caed1540482c291e6dd0bc5df3337d34dfcc3236b231b013e89e690a3" exitCode=0 Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.081440 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerDied","Data":"0b334c6caed1540482c291e6dd0bc5df3337d34dfcc3236b231b013e89e690a3"} Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.081682 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2e90ba10-2a7d-474b-8a81-0049bc3a998a","Type":"ContainerDied","Data":"b57f56e8fc66f60a8912903b97ff09c010fb1bd622ccdd82f81ab7684789e393"} Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.081702 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b57f56e8fc66f60a8912903b97ff09c010fb1bd622ccdd82f81ab7684789e393" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.084716 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e089e8e6-b44f-493e-bcc8-3758961c95b8","Type":"ContainerStarted","Data":"b1491cd6e2bb4409d8e72578eb9cbd5cac4e1a637b39ae9c4f2e834200ede25a"} Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.100679 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.134306 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.210659 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whw7f\" (UniqueName: \"kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f\") pod \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.210714 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle\") pod \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.210752 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data\") pod \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.210842 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs\") pod \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\" (UID: \"2e90ba10-2a7d-474b-8a81-0049bc3a998a\") " Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.212553 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs" (OuterVolumeSpecName: "logs") pod "2e90ba10-2a7d-474b-8a81-0049bc3a998a" (UID: "2e90ba10-2a7d-474b-8a81-0049bc3a998a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.217463 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f" (OuterVolumeSpecName: "kube-api-access-whw7f") pod "2e90ba10-2a7d-474b-8a81-0049bc3a998a" (UID: "2e90ba10-2a7d-474b-8a81-0049bc3a998a"). InnerVolumeSpecName "kube-api-access-whw7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.245582 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e90ba10-2a7d-474b-8a81-0049bc3a998a" (UID: "2e90ba10-2a7d-474b-8a81-0049bc3a998a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.260073 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data" (OuterVolumeSpecName: "config-data") pod "2e90ba10-2a7d-474b-8a81-0049bc3a998a" (UID: "2e90ba10-2a7d-474b-8a81-0049bc3a998a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.313251 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e90ba10-2a7d-474b-8a81-0049bc3a998a-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.313292 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whw7f\" (UniqueName: \"kubernetes.io/projected/2e90ba10-2a7d-474b-8a81-0049bc3a998a-kube-api-access-whw7f\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.313308 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.313319 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e90ba10-2a7d-474b-8a81-0049bc3a998a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.714698 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.715060 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:10:35 crc kubenswrapper[4672]: I1007 15:10:35.906742 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b8f8b07-4874-45cd-b30b-d2348e9f65e1" path="/var/lib/kubelet/pods/8b8f8b07-4874-45cd-b30b-d2348e9f65e1/volumes" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.097811 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e089e8e6-b44f-493e-bcc8-3758961c95b8","Type":"ContainerStarted","Data":"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b"} Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.097861 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.116651 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.116630332 podStartE2EDuration="2.116630332s" podCreationTimestamp="2025-10-07 15:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:36.116436336 +0000 UTC m=+1313.091614917" watchObservedRunningTime="2025-10-07 15:10:36.116630332 +0000 UTC m=+1313.091808913" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.154254 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.178038 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.192358 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:36 crc kubenswrapper[4672]: E1007 15:10:36.192893 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-api" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.192922 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-api" Oct 07 15:10:36 crc kubenswrapper[4672]: E1007 15:10:36.192946 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-log" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.192954 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-log" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.193225 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-api" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.193278 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" containerName="nova-api-log" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.194473 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.200250 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.203193 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.229688 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.229800 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.230102 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lds24\" (UniqueName: \"kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.230403 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.332595 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.332670 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.332744 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lds24\" (UniqueName: \"kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.332807 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.333320 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " 
pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.340729 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.349590 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lds24\" (UniqueName: \"kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.349625 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " pod="openstack/nova-api-0" Oct 07 15:10:36 crc kubenswrapper[4672]: I1007 15:10:36.521456 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:37 crc kubenswrapper[4672]: I1007 15:10:37.012601 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:37 crc kubenswrapper[4672]: I1007 15:10:37.106436 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerStarted","Data":"7befc100edabc0596d14207413d2fed89aa88d0c12d4e302bfc9d48199dc00d7"} Oct 07 15:10:37 crc kubenswrapper[4672]: I1007 15:10:37.906529 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e90ba10-2a7d-474b-8a81-0049bc3a998a" path="/var/lib/kubelet/pods/2e90ba10-2a7d-474b-8a81-0049bc3a998a/volumes" Oct 07 15:10:38 crc kubenswrapper[4672]: I1007 15:10:38.118410 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerStarted","Data":"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee"} Oct 07 15:10:38 crc kubenswrapper[4672]: I1007 15:10:38.118454 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerStarted","Data":"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977"} Oct 07 15:10:38 crc kubenswrapper[4672]: I1007 15:10:38.139464 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.139446249 podStartE2EDuration="2.139446249s" podCreationTimestamp="2025-10-07 15:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:38.135795273 +0000 UTC m=+1315.110973844" watchObservedRunningTime="2025-10-07 15:10:38.139446249 +0000 UTC m=+1315.114624830" Oct 07 15:10:39 crc kubenswrapper[4672]: I1007 15:10:39.469123 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 15:10:40 crc kubenswrapper[4672]: I1007 15:10:40.714342 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 15:10:40 crc kubenswrapper[4672]: I1007 15:10:40.714675 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.448119 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.731288 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.731350 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.924331 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-gj4vc"] Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.925580 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.927919 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.928223 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.936208 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.936429 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.936603 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z2nf\" (UniqueName: \"kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.936705 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:41 crc kubenswrapper[4672]: I1007 15:10:41.955409 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-gj4vc"] Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.038976 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.039429 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.039522 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.039619 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z2nf\" (UniqueName: \"kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.045535 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.045846 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.052646 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.056484 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z2nf\" (UniqueName: \"kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf\") pod \"nova-cell1-cell-mapping-gj4vc\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.256695 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:42 crc kubenswrapper[4672]: I1007 15:10:42.717445 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-gj4vc"] Oct 07 15:10:43 crc kubenswrapper[4672]: I1007 15:10:43.163131 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gj4vc" event={"ID":"13223694-99fa-41ce-bc04-7b5387189b6d","Type":"ContainerStarted","Data":"29fddc08f4c8cab4953684cf0883adb40aefb2e1a486541fa433e12e0049243c"} Oct 07 15:10:43 crc kubenswrapper[4672]: I1007 15:10:43.163421 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gj4vc" event={"ID":"13223694-99fa-41ce-bc04-7b5387189b6d","Type":"ContainerStarted","Data":"a78ce99f1bf464ce091c8cff5839cd572e7c8c54e52faf3a6e3668fd9af6cac4"} Oct 07 15:10:43 crc kubenswrapper[4672]: I1007 15:10:43.183879 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-gj4vc" podStartSLOduration=2.183862142 podStartE2EDuration="2.183862142s" podCreationTimestamp="2025-10-07 15:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:43.182706738 +0000 UTC m=+1320.157885319" watchObservedRunningTime="2025-10-07 15:10:43.183862142 +0000 UTC m=+1320.159040723" Oct 07 15:10:44 crc kubenswrapper[4672]: I1007 15:10:44.468781 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 15:10:44 crc kubenswrapper[4672]: I1007 15:10:44.507228 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 15:10:45 crc kubenswrapper[4672]: I1007 15:10:45.207979 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 15:10:46 crc kubenswrapper[4672]: I1007 15:10:46.522286 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:10:46 crc kubenswrapper[4672]: I1007 15:10:46.522555 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:10:47 crc kubenswrapper[4672]: I1007 15:10:47.604262 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:47 crc kubenswrapper[4672]: I1007 15:10:47.605490 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:10:48 crc kubenswrapper[4672]: I1007 15:10:48.208271 4672 generic.go:334] "Generic (PLEG): container finished" podID="13223694-99fa-41ce-bc04-7b5387189b6d" containerID="29fddc08f4c8cab4953684cf0883adb40aefb2e1a486541fa433e12e0049243c" exitCode=0 Oct 07 15:10:48 crc kubenswrapper[4672]: I1007 15:10:48.208382 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gj4vc" 
event={"ID":"13223694-99fa-41ce-bc04-7b5387189b6d","Type":"ContainerDied","Data":"29fddc08f4c8cab4953684cf0883adb40aefb2e1a486541fa433e12e0049243c"} Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.539853 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.605335 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data\") pod \"13223694-99fa-41ce-bc04-7b5387189b6d\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.605467 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z2nf\" (UniqueName: \"kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf\") pod \"13223694-99fa-41ce-bc04-7b5387189b6d\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.605504 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle\") pod \"13223694-99fa-41ce-bc04-7b5387189b6d\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.605619 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts\") pod \"13223694-99fa-41ce-bc04-7b5387189b6d\" (UID: \"13223694-99fa-41ce-bc04-7b5387189b6d\") " Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.610928 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf" (OuterVolumeSpecName: "kube-api-access-4z2nf") pod "13223694-99fa-41ce-bc04-7b5387189b6d" (UID: "13223694-99fa-41ce-bc04-7b5387189b6d"). InnerVolumeSpecName "kube-api-access-4z2nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.615121 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts" (OuterVolumeSpecName: "scripts") pod "13223694-99fa-41ce-bc04-7b5387189b6d" (UID: "13223694-99fa-41ce-bc04-7b5387189b6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.632782 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data" (OuterVolumeSpecName: "config-data") pod "13223694-99fa-41ce-bc04-7b5387189b6d" (UID: "13223694-99fa-41ce-bc04-7b5387189b6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.641784 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13223694-99fa-41ce-bc04-7b5387189b6d" (UID: "13223694-99fa-41ce-bc04-7b5387189b6d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.707540 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z2nf\" (UniqueName: \"kubernetes.io/projected/13223694-99fa-41ce-bc04-7b5387189b6d-kube-api-access-4z2nf\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.707571 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.707583 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:49 crc kubenswrapper[4672]: I1007 15:10:49.707592 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13223694-99fa-41ce-bc04-7b5387189b6d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.225571 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gj4vc" event={"ID":"13223694-99fa-41ce-bc04-7b5387189b6d","Type":"ContainerDied","Data":"a78ce99f1bf464ce091c8cff5839cd572e7c8c54e52faf3a6e3668fd9af6cac4"} Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.225826 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gj4vc" Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.225848 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a78ce99f1bf464ce091c8cff5839cd572e7c8c54e52faf3a6e3668fd9af6cac4" Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.320280 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.320650 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-log" containerID="cri-o://6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977" gracePeriod=30 Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.320957 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-api" containerID="cri-o://be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee" gracePeriod=30 Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.356155 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.356460 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e089e8e6-b44f-493e-bcc8-3758961c95b8" containerName="nova-scheduler-scheduler" containerID="cri-o://9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b" gracePeriod=30 Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.375063 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.375317 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" 
containerName="nova-metadata-log" containerID="cri-o://6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb" gracePeriod=30 Oct 07 15:10:50 crc kubenswrapper[4672]: I1007 15:10:50.375810 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-metadata" containerID="cri-o://ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012" gracePeriod=30 Oct 07 15:10:51 crc kubenswrapper[4672]: I1007 15:10:51.235148 4672 generic.go:334] "Generic (PLEG): container finished" podID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerID="6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb" exitCode=143 Oct 07 15:10:51 crc kubenswrapper[4672]: I1007 15:10:51.235233 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerDied","Data":"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb"} Oct 07 15:10:51 crc kubenswrapper[4672]: I1007 15:10:51.237744 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerID="6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977" exitCode=143 Oct 07 15:10:51 crc kubenswrapper[4672]: I1007 15:10:51.237779 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerDied","Data":"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977"} Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.150372 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.247834 4672 generic.go:334] "Generic (PLEG): container finished" podID="e089e8e6-b44f-493e-bcc8-3758961c95b8" containerID="9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b" exitCode=0 Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.247885 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e089e8e6-b44f-493e-bcc8-3758961c95b8","Type":"ContainerDied","Data":"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b"} Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.247889 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.247944 4672 scope.go:117] "RemoveContainer" containerID="9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.247914 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e089e8e6-b44f-493e-bcc8-3758961c95b8","Type":"ContainerDied","Data":"b1491cd6e2bb4409d8e72578eb9cbd5cac4e1a637b39ae9c4f2e834200ede25a"} Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.268975 4672 scope.go:117] "RemoveContainer" containerID="9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b" Oct 07 15:10:52 crc kubenswrapper[4672]: E1007 15:10:52.269654 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b\": container with ID starting with 9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b not found: ID does not exist" containerID="9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.270008 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b"} err="failed to get container status \"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b\": rpc error: code = NotFound desc = could not find container \"9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b\": container with ID starting with 9bf59605b80b6dc84bbe56b6cc946ae181d6443d64c101249338c0184d3c1d9b not found: ID does not exist" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.272434 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krqvd\" (UniqueName: \"kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd\") pod \"e089e8e6-b44f-493e-bcc8-3758961c95b8\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.272606 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle\") pod \"e089e8e6-b44f-493e-bcc8-3758961c95b8\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.272794 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data\") pod \"e089e8e6-b44f-493e-bcc8-3758961c95b8\" (UID: \"e089e8e6-b44f-493e-bcc8-3758961c95b8\") " Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.278286 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd" (OuterVolumeSpecName: "kube-api-access-krqvd") pod "e089e8e6-b44f-493e-bcc8-3758961c95b8" (UID: "e089e8e6-b44f-493e-bcc8-3758961c95b8"). InnerVolumeSpecName "kube-api-access-krqvd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.298560 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e089e8e6-b44f-493e-bcc8-3758961c95b8" (UID: "e089e8e6-b44f-493e-bcc8-3758961c95b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.302027 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data" (OuterVolumeSpecName: "config-data") pod "e089e8e6-b44f-493e-bcc8-3758961c95b8" (UID: "e089e8e6-b44f-493e-bcc8-3758961c95b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.374859 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.374899 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krqvd\" (UniqueName: \"kubernetes.io/projected/e089e8e6-b44f-493e-bcc8-3758961c95b8-kube-api-access-krqvd\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.374917 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e089e8e6-b44f-493e-bcc8-3758961c95b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.572518 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.580238 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.591659 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:52 crc kubenswrapper[4672]: E1007 15:10:52.592306 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e089e8e6-b44f-493e-bcc8-3758961c95b8" containerName="nova-scheduler-scheduler" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.592407 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="e089e8e6-b44f-493e-bcc8-3758961c95b8" containerName="nova-scheduler-scheduler" Oct 07 15:10:52 crc kubenswrapper[4672]: E1007 15:10:52.592497 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13223694-99fa-41ce-bc04-7b5387189b6d" containerName="nova-manage" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.592561 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="13223694-99fa-41ce-bc04-7b5387189b6d" containerName="nova-manage" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.592797 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="e089e8e6-b44f-493e-bcc8-3758961c95b8" containerName="nova-scheduler-scheduler" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.592870 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="13223694-99fa-41ce-bc04-7b5387189b6d" containerName="nova-manage" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.593693 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.596244 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.603593 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.679352 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dnvl\" (UniqueName: \"kubernetes.io/projected/21b9d12a-9d4c-49c3-8e94-80d538f4853a-kube-api-access-8dnvl\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.679527 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-config-data\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.679568 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.781054 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dnvl\" (UniqueName: \"kubernetes.io/projected/21b9d12a-9d4c-49c3-8e94-80d538f4853a-kube-api-access-8dnvl\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.781117 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-config-data\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.781139 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.785119 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.785150 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21b9d12a-9d4c-49c3-8e94-80d538f4853a-config-data\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.796381 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dnvl\" (UniqueName: 
\"kubernetes.io/projected/21b9d12a-9d4c-49c3-8e94-80d538f4853a-kube-api-access-8dnvl\") pod \"nova-scheduler-0\" (UID: \"21b9d12a-9d4c-49c3-8e94-80d538f4853a\") " pod="openstack/nova-scheduler-0" Oct 07 15:10:52 crc kubenswrapper[4672]: I1007 15:10:52.943036 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.360356 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 15:10:53 crc kubenswrapper[4672]: W1007 15:10:53.361159 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21b9d12a_9d4c_49c3_8e94_80d538f4853a.slice/crio-a9b0692ba8ed8f6b51216585054e575c0665203a17374f06e8bf5c06905d8d3a WatchSource:0}: Error finding container a9b0692ba8ed8f6b51216585054e575c0665203a17374f06e8bf5c06905d8d3a: Status 404 returned error can't find the container with id a9b0692ba8ed8f6b51216585054e575c0665203a17374f06e8bf5c06905d8d3a Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.817302 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.903502 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs\") pod \"fa99b6db-90e6-4181-8d52-8340e50fb74c\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.903597 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data\") pod \"fa99b6db-90e6-4181-8d52-8340e50fb74c\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.903631 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e089e8e6-b44f-493e-bcc8-3758961c95b8" path="/var/lib/kubelet/pods/e089e8e6-b44f-493e-bcc8-3758961c95b8/volumes" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.903835 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lds24\" (UniqueName: \"kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24\") pod \"fa99b6db-90e6-4181-8d52-8340e50fb74c\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.903936 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle\") pod \"fa99b6db-90e6-4181-8d52-8340e50fb74c\" (UID: \"fa99b6db-90e6-4181-8d52-8340e50fb74c\") " Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.904179 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs" (OuterVolumeSpecName: "logs") pod "fa99b6db-90e6-4181-8d52-8340e50fb74c" (UID: "fa99b6db-90e6-4181-8d52-8340e50fb74c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.904549 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa99b6db-90e6-4181-8d52-8340e50fb74c-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.907556 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24" (OuterVolumeSpecName: "kube-api-access-lds24") pod "fa99b6db-90e6-4181-8d52-8340e50fb74c" (UID: "fa99b6db-90e6-4181-8d52-8340e50fb74c"). InnerVolumeSpecName "kube-api-access-lds24". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.929591 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa99b6db-90e6-4181-8d52-8340e50fb74c" (UID: "fa99b6db-90e6-4181-8d52-8340e50fb74c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:53 crc kubenswrapper[4672]: I1007 15:10:53.938968 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data" (OuterVolumeSpecName: "config-data") pod "fa99b6db-90e6-4181-8d52-8340e50fb74c" (UID: "fa99b6db-90e6-4181-8d52-8340e50fb74c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.006315 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.006359 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lds24\" (UniqueName: \"kubernetes.io/projected/fa99b6db-90e6-4181-8d52-8340e50fb74c-kube-api-access-lds24\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.006373 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa99b6db-90e6-4181-8d52-8340e50fb74c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.030634 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.107974 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs\") pod \"112df96d-519d-4989-a9d2-1bbb1c5f3496\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.108114 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data\") pod \"112df96d-519d-4989-a9d2-1bbb1c5f3496\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.108167 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle\") pod \"112df96d-519d-4989-a9d2-1bbb1c5f3496\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.108250 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs\") pod \"112df96d-519d-4989-a9d2-1bbb1c5f3496\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.108314 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v62td\" (UniqueName: \"kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td\") pod \"112df96d-519d-4989-a9d2-1bbb1c5f3496\" (UID: \"112df96d-519d-4989-a9d2-1bbb1c5f3496\") " Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.108981 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs" (OuterVolumeSpecName: "logs") pod "112df96d-519d-4989-a9d2-1bbb1c5f3496" (UID: "112df96d-519d-4989-a9d2-1bbb1c5f3496"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.113529 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td" (OuterVolumeSpecName: "kube-api-access-v62td") pod "112df96d-519d-4989-a9d2-1bbb1c5f3496" (UID: "112df96d-519d-4989-a9d2-1bbb1c5f3496"). InnerVolumeSpecName "kube-api-access-v62td". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.134214 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data" (OuterVolumeSpecName: "config-data") pod "112df96d-519d-4989-a9d2-1bbb1c5f3496" (UID: "112df96d-519d-4989-a9d2-1bbb1c5f3496"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.135256 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "112df96d-519d-4989-a9d2-1bbb1c5f3496" (UID: "112df96d-519d-4989-a9d2-1bbb1c5f3496"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.165806 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "112df96d-519d-4989-a9d2-1bbb1c5f3496" (UID: "112df96d-519d-4989-a9d2-1bbb1c5f3496"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.210280 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.210315 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/112df96d-519d-4989-a9d2-1bbb1c5f3496-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.210329 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v62td\" (UniqueName: \"kubernetes.io/projected/112df96d-519d-4989-a9d2-1bbb1c5f3496-kube-api-access-v62td\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.210342 4672 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.210354 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112df96d-519d-4989-a9d2-1bbb1c5f3496-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.271739 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"21b9d12a-9d4c-49c3-8e94-80d538f4853a","Type":"ContainerStarted","Data":"966427687acc765aa9ab172b99d5918bc47c5562057deaf8223f70806dab416b"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.272066 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"21b9d12a-9d4c-49c3-8e94-80d538f4853a","Type":"ContainerStarted","Data":"a9b0692ba8ed8f6b51216585054e575c0665203a17374f06e8bf5c06905d8d3a"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.275245 4672 generic.go:334] "Generic (PLEG): container finished" podID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerID="ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012" exitCode=0 Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.275415 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.275419 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerDied","Data":"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.275690 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"112df96d-519d-4989-a9d2-1bbb1c5f3496","Type":"ContainerDied","Data":"6c1b619c1220efbf7dba8ba1cff1eed64a219261cce1d52302a862b942dc1fb0"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.275713 4672 scope.go:117] "RemoveContainer" containerID="ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.279385 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerID="be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee" exitCode=0 Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.279429 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerDied","Data":"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.279454 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa99b6db-90e6-4181-8d52-8340e50fb74c","Type":"ContainerDied","Data":"7befc100edabc0596d14207413d2fed89aa88d0c12d4e302bfc9d48199dc00d7"} Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.279505 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.298116 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.298076795 podStartE2EDuration="2.298076795s" podCreationTimestamp="2025-10-07 15:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:54.296162549 +0000 UTC m=+1331.271341140" watchObservedRunningTime="2025-10-07 15:10:54.298076795 +0000 UTC m=+1331.273255376" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.340279 4672 scope.go:117] "RemoveContainer" containerID="6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.342876 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.352603 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.362693 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.363131 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-log" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363152 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-log" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.363170 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-metadata" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363176 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-metadata" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.363191 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-log" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363197 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-log" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.363217 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-api" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363222 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-api" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363435 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-api" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363446 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-metadata" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363461 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" containerName="nova-api-log" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.363475 4672 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" containerName="nova-metadata-log" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.364590 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.369293 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.382620 4672 scope.go:117] "RemoveContainer" containerID="ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.383230 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012\": container with ID starting with ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012 not found: ID does not exist" containerID="ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.383281 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012"} err="failed to get container status \"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012\": rpc error: code = NotFound desc = could not find container \"ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012\": container with ID starting with ae7c07e1084cd3aecaf835bdf707de0990abd9af82ab87f84db6b73f566d8012 not found: ID does not exist" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.383314 4672 scope.go:117] "RemoveContainer" containerID="6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.383777 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb\": container with ID starting with 6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb not found: ID does not exist" containerID="6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.383804 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb"} err="failed to get container status \"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb\": rpc error: code = NotFound desc = could not find container \"6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb\": container with ID starting with 6a119cdfe4f4311f8007ebf657f15d794d7f292d7deb5bc3d31d3d9f3e05d5eb not found: ID does not exist" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.383824 4672 scope.go:117] "RemoveContainer" containerID="be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.391106 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.403001 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.412577 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: 
I1007 15:10:54.413851 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.413897 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.413966 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.413989 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzd54\" (UniqueName: \"kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.419795 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.422684 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.426120 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.426513 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.442156 4672 scope.go:117] "RemoveContainer" containerID="6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.451722 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.477810 4672 scope.go:117] "RemoveContainer" containerID="be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.478337 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee\": container with ID starting with be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee not found: ID does not exist" containerID="be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.478672 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee"} err="failed to get container status \"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee\": rpc error: code = NotFound desc = could not find container \"be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee\": container 
with ID starting with be95c429e2448e8aeb90fb284ec7b12d8213b1627ec7a34a7ed2994d3a5120ee not found: ID does not exist" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.478760 4672 scope.go:117] "RemoveContainer" containerID="6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977" Oct 07 15:10:54 crc kubenswrapper[4672]: E1007 15:10:54.479005 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977\": container with ID starting with 6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977 not found: ID does not exist" containerID="6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.479057 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977"} err="failed to get container status \"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977\": rpc error: code = NotFound desc = could not find container \"6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977\": container with ID starting with 6eef1f45e5689ee51f527170ed931f4bef31ea5db93f73b0e05d5a6fd35e0977 not found: ID does not exist" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515254 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515335 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050ad2ed-cc5a-41bb-a2f9-437deee02938-logs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515502 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515592 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515720 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-config-data\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515762 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: 
I1007 15:10:54.515803 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzd54\" (UniqueName: \"kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515846 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.515891 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzvjb\" (UniqueName: \"kubernetes.io/projected/050ad2ed-cc5a-41bb-a2f9-437deee02938-kube-api-access-mzvjb\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.516148 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.521504 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.522051 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.531794 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzd54\" (UniqueName: \"kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54\") pod \"nova-api-0\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617369 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-config-data\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617441 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617462 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzvjb\" (UniqueName: \"kubernetes.io/projected/050ad2ed-cc5a-41bb-a2f9-437deee02938-kube-api-access-mzvjb\") pod \"nova-metadata-0\" (UID: 
\"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617483 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617498 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050ad2ed-cc5a-41bb-a2f9-437deee02938-logs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.617916 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050ad2ed-cc5a-41bb-a2f9-437deee02938-logs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.620914 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.621199 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.622179 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050ad2ed-cc5a-41bb-a2f9-437deee02938-config-data\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.633086 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzvjb\" (UniqueName: \"kubernetes.io/projected/050ad2ed-cc5a-41bb-a2f9-437deee02938-kube-api-access-mzvjb\") pod \"nova-metadata-0\" (UID: \"050ad2ed-cc5a-41bb-a2f9-437deee02938\") " pod="openstack/nova-metadata-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.682581 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:10:54 crc kubenswrapper[4672]: I1007 15:10:54.749275 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.118628 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.259691 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.291573 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerStarted","Data":"16b70d97500049241d803dde91829f61ec36ec4bccf950151faff7a044d3c8be"} Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.293921 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"050ad2ed-cc5a-41bb-a2f9-437deee02938","Type":"ContainerStarted","Data":"862c401a0fb685167beaf2d622ac8808356d2759f30c0138e972c50da04e245d"} Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.902325 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112df96d-519d-4989-a9d2-1bbb1c5f3496" path="/var/lib/kubelet/pods/112df96d-519d-4989-a9d2-1bbb1c5f3496/volumes" Oct 07 15:10:55 crc kubenswrapper[4672]: I1007 15:10:55.903176 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa99b6db-90e6-4181-8d52-8340e50fb74c" path="/var/lib/kubelet/pods/fa99b6db-90e6-4181-8d52-8340e50fb74c/volumes" Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.301765 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"050ad2ed-cc5a-41bb-a2f9-437deee02938","Type":"ContainerStarted","Data":"ac861b4b68b67cd42e751b7fb043841c997470644bcac0ee79d40804629add14"} Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.302077 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"050ad2ed-cc5a-41bb-a2f9-437deee02938","Type":"ContainerStarted","Data":"fc0e2ff85067ff9518b39c1de59bb5c5098f2c6a75a0df4bfffc0614d36ad3bd"} Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.303076 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerStarted","Data":"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9"} Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.303100 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerStarted","Data":"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54"} Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.317294 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.317276788 podStartE2EDuration="2.317276788s" podCreationTimestamp="2025-10-07 15:10:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:56.315411484 +0000 UTC m=+1333.290590065" watchObservedRunningTime="2025-10-07 15:10:56.317276788 +0000 UTC m=+1333.292455369" Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.650337 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Oct 07 15:10:56 crc kubenswrapper[4672]: I1007 15:10:56.650406 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:10:57 crc kubenswrapper[4672]: I1007 15:10:57.347607 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.347586842 podStartE2EDuration="3.347586842s" podCreationTimestamp="2025-10-07 15:10:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:10:57.336607743 +0000 UTC m=+1334.311786324" watchObservedRunningTime="2025-10-07 15:10:57.347586842 +0000 UTC m=+1334.322765423" Oct 07 15:10:57 crc kubenswrapper[4672]: I1007 15:10:57.943501 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 15:10:59 crc kubenswrapper[4672]: I1007 15:10:59.694827 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 15:10:59 crc kubenswrapper[4672]: I1007 15:10:59.751163 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:10:59 crc kubenswrapper[4672]: I1007 15:10:59.752566 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 15:11:02 crc kubenswrapper[4672]: I1007 15:11:02.943250 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 15:11:02 crc kubenswrapper[4672]: I1007 15:11:02.971452 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 15:11:03 crc kubenswrapper[4672]: I1007 15:11:03.384869 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 15:11:04 crc kubenswrapper[4672]: I1007 15:11:04.683479 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:11:04 crc kubenswrapper[4672]: I1007 15:11:04.683841 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:11:04 crc kubenswrapper[4672]: I1007 15:11:04.750039 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 15:11:04 crc kubenswrapper[4672]: I1007 15:11:04.750081 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 15:11:05 crc kubenswrapper[4672]: I1007 15:11:05.767282 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:05 crc kubenswrapper[4672]: I1007 15:11:05.778179 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:05 crc 
kubenswrapper[4672]: I1007 15:11:05.778192 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="050ad2ed-cc5a-41bb-a2f9-437deee02938" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:05 crc kubenswrapper[4672]: I1007 15:11:05.778191 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="050ad2ed-cc5a-41bb-a2f9-437deee02938" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.687232 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.687865 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.688203 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.688246 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.692039 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.692667 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.754088 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.754889 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.766538 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.858700 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"] Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.860913 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.881488 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"] Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.897637 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.897724 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.897784 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.897804 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6jkk\" (UniqueName: \"kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.897825 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.898545 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999584 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999685 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999740 4672 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999785 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999808 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6jkk\" (UniqueName: \"kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:14 crc kubenswrapper[4672]: I1007 15:11:14.999832 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.001081 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.001236 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.001335 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.001376 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.001399 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.022182 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6jkk\" (UniqueName: 
\"kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk\") pod \"dnsmasq-dns-6c66bcfb47-hzd7c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") " pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.184763 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.477239 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 15:11:15 crc kubenswrapper[4672]: I1007 15:11:15.720314 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"] Oct 07 15:11:16 crc kubenswrapper[4672]: I1007 15:11:16.497122 4672 generic.go:334] "Generic (PLEG): container finished" podID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerID="9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47" exitCode=0 Oct 07 15:11:16 crc kubenswrapper[4672]: I1007 15:11:16.498034 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" event={"ID":"b4c79046-59e3-414e-ac5a-0bb51f43187c","Type":"ContainerDied","Data":"9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47"} Oct 07 15:11:16 crc kubenswrapper[4672]: I1007 15:11:16.498406 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" event={"ID":"b4c79046-59e3-414e-ac5a-0bb51f43187c","Type":"ContainerStarted","Data":"17426234c7e7a7ed11ee8e9f2e30df192ae897b8a05a5bade545ee518bb43ad1"} Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.059459 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.059816 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-central-agent" containerID="cri-o://ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.059982 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-notification-agent" containerID="cri-o://189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.059962 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="proxy-httpd" containerID="cri-o://3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.060223 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="sg-core" containerID="cri-o://6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.366174 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512434 4672 generic.go:334] "Generic (PLEG): container finished" podID="90397fae-df43-427c-b2f8-af7a7d8ebb68" 
containerID="3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e" exitCode=0 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512503 4672 generic.go:334] "Generic (PLEG): container finished" podID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerID="6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958" exitCode=2 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512516 4672 generic.go:334] "Generic (PLEG): container finished" podID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerID="ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265" exitCode=0 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512571 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerDied","Data":"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e"} Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512605 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerDied","Data":"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958"} Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.512623 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerDied","Data":"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265"} Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.515476 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-log" containerID="cri-o://9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.515562 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-api" containerID="cri-o://215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9" gracePeriod=30 Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.515630 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" event={"ID":"b4c79046-59e3-414e-ac5a-0bb51f43187c","Type":"ContainerStarted","Data":"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02"} Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.515913 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:17 crc kubenswrapper[4672]: I1007 15:11:17.550966 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" podStartSLOduration=3.5509450879999997 podStartE2EDuration="3.550945088s" podCreationTimestamp="2025-10-07 15:11:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:11:17.535943571 +0000 UTC m=+1354.511122172" watchObservedRunningTime="2025-10-07 15:11:17.550945088 +0000 UTC m=+1354.526123669" Oct 07 15:11:18 crc kubenswrapper[4672]: I1007 15:11:18.526882 4672 generic.go:334] "Generic (PLEG): container finished" podID="568e7e83-504b-46ff-b63b-39939c5cedae" containerID="9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54" exitCode=143 Oct 07 15:11:18 crc kubenswrapper[4672]: I1007 
15:11:18.526979 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerDied","Data":"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54"} Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.358134 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391365 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391432 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391470 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391506 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391587 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391626 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jgwk\" (UniqueName: \"kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391649 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.391685 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd\") pod \"90397fae-df43-427c-b2f8-af7a7d8ebb68\" (UID: \"90397fae-df43-427c-b2f8-af7a7d8ebb68\") " Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.392996 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: 
"90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.396052 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.400357 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts" (OuterVolumeSpecName: "scripts") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.406989 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk" (OuterVolumeSpecName: "kube-api-access-7jgwk") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "kube-api-access-7jgwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.433239 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.475947 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.482608 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493595 4672 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493632 4672 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493646 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jgwk\" (UniqueName: \"kubernetes.io/projected/90397fae-df43-427c-b2f8-af7a7d8ebb68-kube-api-access-7jgwk\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493659 4672 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493670 4672 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90397fae-df43-427c-b2f8-af7a7d8ebb68-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493682 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.493692 4672 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.516120 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data" (OuterVolumeSpecName: "config-data") pod "90397fae-df43-427c-b2f8-af7a7d8ebb68" (UID: "90397fae-df43-427c-b2f8-af7a7d8ebb68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.539924 4672 generic.go:334] "Generic (PLEG): container finished" podID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerID="189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c" exitCode=0 Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.539994 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.540044 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerDied","Data":"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c"} Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.540773 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"90397fae-df43-427c-b2f8-af7a7d8ebb68","Type":"ContainerDied","Data":"97ee226486060ef683a5ed59a385643e02cdd0d0261e6550f46bb4f1ad7d3531"} Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.540800 4672 scope.go:117] "RemoveContainer" containerID="3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.562085 4672 scope.go:117] "RemoveContainer" containerID="6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.578406 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.588818 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.594552 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90397fae-df43-427c-b2f8-af7a7d8ebb68-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.601339 4672 scope.go:117] "RemoveContainer" containerID="189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.606538 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.607070 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="sg-core" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607094 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="sg-core" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.607117 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-notification-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607126 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-notification-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.607158 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-central-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607166 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-central-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.607178 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="proxy-httpd" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607183 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="proxy-httpd" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 
15:11:19.607372 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="proxy-httpd" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607398 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-central-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607409 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="ceilometer-notification-agent" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.607427 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" containerName="sg-core" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.609201 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.611497 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.611535 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.612177 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.615800 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.621653 4672 scope.go:117] "RemoveContainer" containerID="ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.647949 4672 scope.go:117] "RemoveContainer" containerID="3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.648396 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e\": container with ID starting with 3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e not found: ID does not exist" containerID="3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.648446 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e"} err="failed to get container status \"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e\": rpc error: code = NotFound desc = could not find container \"3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e\": container with ID starting with 3119318f59175146f370a0c381cf7cc4eb2adcc84b2000bb447cb370b60c516e not found: ID does not exist" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.648481 4672 scope.go:117] "RemoveContainer" containerID="6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.648977 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958\": container with ID starting with 6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958 not 
found: ID does not exist" containerID="6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.649052 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958"} err="failed to get container status \"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958\": rpc error: code = NotFound desc = could not find container \"6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958\": container with ID starting with 6eb4e74b0288ac12cb9984113189efc518b2d4a71e4a7d63ce1e52d3e2213958 not found: ID does not exist" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.649076 4672 scope.go:117] "RemoveContainer" containerID="189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.649644 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c\": container with ID starting with 189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c not found: ID does not exist" containerID="189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.649678 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c"} err="failed to get container status \"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c\": rpc error: code = NotFound desc = could not find container \"189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c\": container with ID starting with 189ba6e966abeb392125092ae6b409e313490559dbd74e3877ddeeaddcc1264c not found: ID does not exist" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.649696 4672 scope.go:117] "RemoveContainer" containerID="ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265" Oct 07 15:11:19 crc kubenswrapper[4672]: E1007 15:11:19.650912 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265\": container with ID starting with ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265 not found: ID does not exist" containerID="ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.650945 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265"} err="failed to get container status \"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265\": rpc error: code = NotFound desc = could not find container \"ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265\": container with ID starting with ae01dbd56039be888e6ad7f42d09837149ef327460a8191d64fc9744d5970265 not found: ID does not exist" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798413 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " 
pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798475 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-log-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798647 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96fv2\" (UniqueName: \"kubernetes.io/projected/35a2c542-5b6e-4828-a670-fc4345572dc0-kube-api-access-96fv2\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798730 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-run-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798809 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.798940 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.799120 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-config-data\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.799174 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-scripts\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900588 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900657 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-log-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900723 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96fv2\" (UniqueName: 
\"kubernetes.io/projected/35a2c542-5b6e-4828-a670-fc4345572dc0-kube-api-access-96fv2\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900764 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-run-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900814 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900865 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900901 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-config-data\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.900927 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-scripts\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.901287 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-log-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.901748 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35a2c542-5b6e-4828-a670-fc4345572dc0-run-httpd\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.905310 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-config-data\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.905344 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.906375 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.906465 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.906601 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a2c542-5b6e-4828-a670-fc4345572dc0-scripts\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.908197 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90397fae-df43-427c-b2f8-af7a7d8ebb68" path="/var/lib/kubelet/pods/90397fae-df43-427c-b2f8-af7a7d8ebb68/volumes" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.922599 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96fv2\" (UniqueName: \"kubernetes.io/projected/35a2c542-5b6e-4828-a670-fc4345572dc0-kube-api-access-96fv2\") pod \"ceilometer-0\" (UID: \"35a2c542-5b6e-4828-a670-fc4345572dc0\") " pod="openstack/ceilometer-0" Oct 07 15:11:19 crc kubenswrapper[4672]: I1007 15:11:19.927179 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 15:11:20 crc kubenswrapper[4672]: I1007 15:11:20.407894 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 15:11:20 crc kubenswrapper[4672]: W1007 15:11:20.409417 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35a2c542_5b6e_4828_a670_fc4345572dc0.slice/crio-e7e02be8f0859401b45e93ef2a142f2536736209b31ac0d6d6d808d747f5b63c WatchSource:0}: Error finding container e7e02be8f0859401b45e93ef2a142f2536736209b31ac0d6d6d808d747f5b63c: Status 404 returned error can't find the container with id e7e02be8f0859401b45e93ef2a142f2536736209b31ac0d6d6d808d747f5b63c Oct 07 15:11:20 crc kubenswrapper[4672]: I1007 15:11:20.551987 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35a2c542-5b6e-4828-a670-fc4345572dc0","Type":"ContainerStarted","Data":"e7e02be8f0859401b45e93ef2a142f2536736209b31ac0d6d6d808d747f5b63c"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.165603 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.227863 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle\") pod \"568e7e83-504b-46ff-b63b-39939c5cedae\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.227937 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs\") pod \"568e7e83-504b-46ff-b63b-39939c5cedae\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.228004 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data\") pod \"568e7e83-504b-46ff-b63b-39939c5cedae\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.228215 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzd54\" (UniqueName: \"kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54\") pod \"568e7e83-504b-46ff-b63b-39939c5cedae\" (UID: \"568e7e83-504b-46ff-b63b-39939c5cedae\") " Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.228459 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs" (OuterVolumeSpecName: "logs") pod "568e7e83-504b-46ff-b63b-39939c5cedae" (UID: "568e7e83-504b-46ff-b63b-39939c5cedae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.228629 4672 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/568e7e83-504b-46ff-b63b-39939c5cedae-logs\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.232989 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54" (OuterVolumeSpecName: "kube-api-access-tzd54") pod "568e7e83-504b-46ff-b63b-39939c5cedae" (UID: "568e7e83-504b-46ff-b63b-39939c5cedae"). InnerVolumeSpecName "kube-api-access-tzd54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.270110 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data" (OuterVolumeSpecName: "config-data") pod "568e7e83-504b-46ff-b63b-39939c5cedae" (UID: "568e7e83-504b-46ff-b63b-39939c5cedae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.292204 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "568e7e83-504b-46ff-b63b-39939c5cedae" (UID: "568e7e83-504b-46ff-b63b-39939c5cedae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.331958 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.331985 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzd54\" (UniqueName: \"kubernetes.io/projected/568e7e83-504b-46ff-b63b-39939c5cedae-kube-api-access-tzd54\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.331995 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568e7e83-504b-46ff-b63b-39939c5cedae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.563376 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35a2c542-5b6e-4828-a670-fc4345572dc0","Type":"ContainerStarted","Data":"4dce064c61729fbf3d7139dc0008989b37c76cf3d6daf4ef735aa545a589500e"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.563424 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35a2c542-5b6e-4828-a670-fc4345572dc0","Type":"ContainerStarted","Data":"b8ce65c5cc0a278521464b36bf2105eef78b260b3dd7795121ed8475f90de1d1"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.563435 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35a2c542-5b6e-4828-a670-fc4345572dc0","Type":"ContainerStarted","Data":"09cf18cc35fe48df4c43a27e707a3db8257f76a8902ab7501736af3c575f03f1"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.565318 4672 generic.go:334] "Generic (PLEG): container finished" podID="568e7e83-504b-46ff-b63b-39939c5cedae" containerID="215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9" exitCode=0 Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.565345 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerDied","Data":"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.565361 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"568e7e83-504b-46ff-b63b-39939c5cedae","Type":"ContainerDied","Data":"16b70d97500049241d803dde91829f61ec36ec4bccf950151faff7a044d3c8be"} Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.565378 4672 scope.go:117] "RemoveContainer" containerID="215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.565489 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.598036 4672 scope.go:117] "RemoveContainer" containerID="9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.612062 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.623407 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.632849 4672 scope.go:117] "RemoveContainer" containerID="215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9" Oct 07 15:11:21 crc kubenswrapper[4672]: E1007 15:11:21.635139 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9\": container with ID starting with 215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9 not found: ID does not exist" containerID="215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.635182 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9"} err="failed to get container status \"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9\": rpc error: code = NotFound desc = could not find container \"215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9\": container with ID starting with 215943d689d991f91899e762067b07952ad9afab1eb7a4e7aa8023f24242dcb9 not found: ID does not exist" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.635210 4672 scope.go:117] "RemoveContainer" containerID="9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54" Oct 07 15:11:21 crc kubenswrapper[4672]: E1007 15:11:21.635513 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54\": container with ID starting with 9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54 not found: ID does not exist" containerID="9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.635540 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54"} err="failed to get container status \"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54\": rpc error: code = NotFound desc = could not find container \"9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54\": container with ID starting with 9144a857f4a1a4d095a10c909c2dcad7d850cc758601d1fb2d8c2ebe914adb54 not found: ID does not exist" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.654555 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:21 crc kubenswrapper[4672]: E1007 15:11:21.655330 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-api" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.655349 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-api" Oct 07 15:11:21 crc 
kubenswrapper[4672]: E1007 15:11:21.655392 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-log" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.655401 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-log" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.655622 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-api" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.655670 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" containerName="nova-api-log" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.658158 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.661048 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.661846 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.661997 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.663842 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.739889 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-logs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.740264 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.740304 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxwkn\" (UniqueName: \"kubernetes.io/projected/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-kube-api-access-xxwkn\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.740336 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.740484 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-public-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.740565 4672 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-config-data\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842291 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-config-data\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842390 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-logs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842419 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842444 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxwkn\" (UniqueName: \"kubernetes.io/projected/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-kube-api-access-xxwkn\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842473 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842533 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-public-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.842887 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-logs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.848814 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-public-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.848890 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.849458 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-config-data\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.849597 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.860980 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxwkn\" (UniqueName: \"kubernetes.io/projected/7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3-kube-api-access-xxwkn\") pod \"nova-api-0\" (UID: \"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3\") " pod="openstack/nova-api-0" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.903467 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="568e7e83-504b-46ff-b63b-39939c5cedae" path="/var/lib/kubelet/pods/568e7e83-504b-46ff-b63b-39939c5cedae/volumes" Oct 07 15:11:21 crc kubenswrapper[4672]: I1007 15:11:21.982769 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 15:11:22 crc kubenswrapper[4672]: W1007 15:11:22.448465 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cc6827c_96b0_4d2c_a45a_6b11d20e6ee3.slice/crio-77b892101498f948494327c5f763e18994cab2b857cfae34a1ac3e4023fd80db WatchSource:0}: Error finding container 77b892101498f948494327c5f763e18994cab2b857cfae34a1ac3e4023fd80db: Status 404 returned error can't find the container with id 77b892101498f948494327c5f763e18994cab2b857cfae34a1ac3e4023fd80db Oct 07 15:11:22 crc kubenswrapper[4672]: I1007 15:11:22.451158 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 15:11:22 crc kubenswrapper[4672]: I1007 15:11:22.576809 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3","Type":"ContainerStarted","Data":"77b892101498f948494327c5f763e18994cab2b857cfae34a1ac3e4023fd80db"} Oct 07 15:11:23 crc kubenswrapper[4672]: I1007 15:11:23.589491 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3","Type":"ContainerStarted","Data":"955abbde0c5801b370489ea20aa76de000d699ed85abfd4b812f3f58e0bce5bf"} Oct 07 15:11:23 crc kubenswrapper[4672]: I1007 15:11:23.589962 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3","Type":"ContainerStarted","Data":"7df62db24fd6850ca75465717bbc396ddd5a3e1a7da95b33e53a60daf7d04744"} Oct 07 15:11:23 crc kubenswrapper[4672]: I1007 15:11:23.593872 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35a2c542-5b6e-4828-a670-fc4345572dc0","Type":"ContainerStarted","Data":"525a1d410c5b530929dec213cbe57ee9e2ea657159b41f889c49b0f04b1f9431"} Oct 07 15:11:23 crc kubenswrapper[4672]: I1007 15:11:23.594065 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 15:11:23 crc kubenswrapper[4672]: I1007 15:11:23.616829 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" 
podStartSLOduration=2.616809603 podStartE2EDuration="2.616809603s" podCreationTimestamp="2025-10-07 15:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:11:23.611330074 +0000 UTC m=+1360.586508665" watchObservedRunningTime="2025-10-07 15:11:23.616809603 +0000 UTC m=+1360.591988184" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.186747 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.204961 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.814266782 podStartE2EDuration="6.204944832s" podCreationTimestamp="2025-10-07 15:11:19 +0000 UTC" firstStartedPulling="2025-10-07 15:11:20.411770394 +0000 UTC m=+1357.386948975" lastFinishedPulling="2025-10-07 15:11:22.802448444 +0000 UTC m=+1359.777627025" observedRunningTime="2025-10-07 15:11:23.631529672 +0000 UTC m=+1360.606708263" watchObservedRunningTime="2025-10-07 15:11:25.204944832 +0000 UTC m=+1362.180123413" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.250319 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.250684 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="dnsmasq-dns" containerID="cri-o://0df6b9cca22acf7ed6ebd4284994c08d93e0bfb73a173003139991d8233f8903" gracePeriod=10 Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.625397 4672 generic.go:334] "Generic (PLEG): container finished" podID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerID="0df6b9cca22acf7ed6ebd4284994c08d93e0bfb73a173003139991d8233f8903" exitCode=0 Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.625499 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" event={"ID":"2796a4c8-8ad4-4f4d-b7c8-11c69815514a","Type":"ContainerDied","Data":"0df6b9cca22acf7ed6ebd4284994c08d93e0bfb73a173003139991d8233f8903"} Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.824240 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.921984 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92d2g\" (UniqueName: \"kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.922117 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.922154 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.922198 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.922231 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.922361 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb\") pod \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\" (UID: \"2796a4c8-8ad4-4f4d-b7c8-11c69815514a\") " Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.929903 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g" (OuterVolumeSpecName: "kube-api-access-92d2g") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "kube-api-access-92d2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.988852 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:11:25 crc kubenswrapper[4672]: I1007 15:11:25.990687 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.005799 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config" (OuterVolumeSpecName: "config") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.008198 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.023822 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2796a4c8-8ad4-4f4d-b7c8-11c69815514a" (UID: "2796a4c8-8ad4-4f4d-b7c8-11c69815514a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025338 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025378 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92d2g\" (UniqueName: \"kubernetes.io/projected/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-kube-api-access-92d2g\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025399 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025412 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025428 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.025444 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2796a4c8-8ad4-4f4d-b7c8-11c69815514a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.638955 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" event={"ID":"2796a4c8-8ad4-4f4d-b7c8-11c69815514a","Type":"ContainerDied","Data":"b787118222eb277c1280eecc5e005c02e6e7706c9f55cff672329e3f8edbc74f"} Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.639308 4672 scope.go:117] "RemoveContainer" containerID="0df6b9cca22acf7ed6ebd4284994c08d93e0bfb73a173003139991d8233f8903" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.639095 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-597bd8d975-qqr42" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.650525 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.650591 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.650635 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.651674 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.651736 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a" gracePeriod=600 Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.670465 4672 scope.go:117] "RemoveContainer" containerID="0012f455e7b087748586b7bf637920fd7c137c3f86b41630f532099b0d00d174" Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.698358 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:11:26 crc kubenswrapper[4672]: I1007 15:11:26.711875 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-597bd8d975-qqr42"] Oct 07 15:11:27 crc kubenswrapper[4672]: I1007 15:11:27.652615 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a" exitCode=0 Oct 07 15:11:27 crc kubenswrapper[4672]: I1007 15:11:27.653102 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a"} Oct 07 15:11:27 crc kubenswrapper[4672]: I1007 15:11:27.653137 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429"} Oct 07 15:11:27 crc kubenswrapper[4672]: I1007 15:11:27.653159 4672 scope.go:117] "RemoveContainer" containerID="8e961035363bda8e811ffc1c695a7f2039c7606ff629f74fa31612bebfcadced" Oct 07 15:11:27 crc kubenswrapper[4672]: I1007 15:11:27.902493 4672 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" path="/var/lib/kubelet/pods/2796a4c8-8ad4-4f4d-b7c8-11c69815514a/volumes" Oct 07 15:11:31 crc kubenswrapper[4672]: I1007 15:11:31.984134 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:11:31 crc kubenswrapper[4672]: I1007 15:11:31.984695 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 15:11:33 crc kubenswrapper[4672]: I1007 15:11:33.000265 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:33 crc kubenswrapper[4672]: I1007 15:11:33.000355 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 15:11:41 crc kubenswrapper[4672]: I1007 15:11:41.992041 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 15:11:41 crc kubenswrapper[4672]: I1007 15:11:41.992715 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 15:11:41 crc kubenswrapper[4672]: I1007 15:11:41.993199 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 15:11:41 crc kubenswrapper[4672]: I1007 15:11:41.993223 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 15:11:41 crc kubenswrapper[4672]: I1007 15:11:41.999800 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 15:11:42 crc kubenswrapper[4672]: I1007 15:11:42.001885 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 15:11:49 crc kubenswrapper[4672]: I1007 15:11:49.934645 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 15:11:59 crc kubenswrapper[4672]: I1007 15:11:59.744696 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:01 crc kubenswrapper[4672]: I1007 15:12:01.182749 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 15:12:03 crc kubenswrapper[4672]: I1007 15:12:03.859919 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="rabbitmq" containerID="cri-o://7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40" gracePeriod=604796 Oct 07 15:12:05 crc kubenswrapper[4672]: I1007 15:12:05.103389 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="rabbitmq" containerID="cri-o://fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723" gracePeriod=604797 Oct 07 15:12:07 crc kubenswrapper[4672]: I1007 15:12:07.026745 4672 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/rabbitmq-cell1-server-0" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Oct 07 15:12:07 crc kubenswrapper[4672]: I1007 15:12:07.075431 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.481195 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658308 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658379 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658401 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658437 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658476 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wplcs\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658522 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658598 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658614 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc 
kubenswrapper[4672]: I1007 15:12:10.658638 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658655 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.658743 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info\") pod \"d4a0a2c2-a878-473c-86da-5a74aa392982\" (UID: \"d4a0a2c2-a878-473c-86da-5a74aa392982\") " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.659069 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.659225 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.659855 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.663445 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.665115 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs" (OuterVolumeSpecName: "kube-api-access-wplcs") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "kube-api-access-wplcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.665646 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.669642 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.675425 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.676052 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info" (OuterVolumeSpecName: "pod-info") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.714343 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data" (OuterVolumeSpecName: "config-data") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.733690 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf" (OuterVolumeSpecName: "server-conf") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761053 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761089 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761103 4672 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761112 4672 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4a0a2c2-a878-473c-86da-5a74aa392982-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761123 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761136 4672 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4a0a2c2-a878-473c-86da-5a74aa392982-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761147 4672 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4a0a2c2-a878-473c-86da-5a74aa392982-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761159 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wplcs\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-kube-api-access-wplcs\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.761172 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.795289 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.810302 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d4a0a2c2-a878-473c-86da-5a74aa392982" (UID: "d4a0a2c2-a878-473c-86da-5a74aa392982"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.869686 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4a0a2c2-a878-473c-86da-5a74aa392982-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:10 crc kubenswrapper[4672]: I1007 15:12:10.870478 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.086778 4672 generic.go:334] "Generic (PLEG): container finished" podID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerID="7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40" exitCode=0 Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.086934 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerDied","Data":"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40"} Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.087123 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4a0a2c2-a878-473c-86da-5a74aa392982","Type":"ContainerDied","Data":"2c763752a776c90044662cd05f25905b0c965c8dd62a72cb18ac7ecbd817cf9e"} Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.087145 4672 scope.go:117] "RemoveContainer" containerID="7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.087006 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.157607 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.162243 4672 scope.go:117] "RemoveContainer" containerID="35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.168302 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.190557 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.191095 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="setup-container" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191118 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="setup-container" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191235 4672 scope.go:117] "RemoveContainer" containerID="7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40" Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.191397 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="dnsmasq-dns" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191464 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="dnsmasq-dns" Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.191534 4672 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="rabbitmq" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191546 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="rabbitmq" Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.191560 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="init" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191567 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="init" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191948 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2796a4c8-8ad4-4f4d-b7c8-11c69815514a" containerName="dnsmasq-dns" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.191978 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" containerName="rabbitmq" Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.193963 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40\": container with ID starting with 7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40 not found: ID does not exist" containerID="7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.194034 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40"} err="failed to get container status \"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40\": rpc error: code = NotFound desc = could not find container \"7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40\": container with ID starting with 7e802d71e87eec082ca8521b2bf5cdf859e3ab43f1257f5e993a66a706c7ca40 not found: ID does not exist" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.194067 4672 scope.go:117] "RemoveContainer" containerID="35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9" Oct 07 15:12:11 crc kubenswrapper[4672]: E1007 15:12:11.194384 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9\": container with ID starting with 35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9 not found: ID does not exist" containerID="35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.194417 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9"} err="failed to get container status \"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9\": rpc error: code = NotFound desc = could not find container \"35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9\": container with ID starting with 35c904e53599e6dbd56e439a78aa68fc9c4182ab8d7144305079215de7c231e9 not found: ID does not exist" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.197765 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.200604 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.200685 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5nz5c" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.200614 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.204038 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.211206 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.211309 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.212219 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.214472 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.380507 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6915154f-a1f8-4f93-8a8b-00020a761f95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.380559 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6915154f-a1f8-4f93-8a8b-00020a761f95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.380620 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgsch\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-kube-api-access-jgsch\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.380779 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-config-data\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.380939 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381070 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381130 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381180 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381213 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381230 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.381269 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482767 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgsch\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-kube-api-access-jgsch\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482838 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-config-data\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482890 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482916 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " 
pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482934 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482954 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482972 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.482985 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.483043 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.483068 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6915154f-a1f8-4f93-8a8b-00020a761f95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.483098 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6915154f-a1f8-4f93-8a8b-00020a761f95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.484543 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.486705 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.488789 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-config-data\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.488789 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6915154f-a1f8-4f93-8a8b-00020a761f95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.489052 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.490789 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.491221 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6915154f-a1f8-4f93-8a8b-00020a761f95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.494875 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6915154f-a1f8-4f93-8a8b-00020a761f95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.495686 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.500581 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.523759 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgsch\" (UniqueName: \"kubernetes.io/projected/6915154f-a1f8-4f93-8a8b-00020a761f95-kube-api-access-jgsch\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.540791 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6915154f-a1f8-4f93-8a8b-00020a761f95\") " pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.681274 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789487 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789555 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789677 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789735 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789770 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789814 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789832 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789858 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789878 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789943 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: 
\"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.789973 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwjjb\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb\") pod \"1afcb6ce-1241-4930-8639-bee8a9a76d11\" (UID: \"1afcb6ce-1241-4930-8639-bee8a9a76d11\") " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.790467 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.794241 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.794874 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.796419 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.797830 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.797895 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info" (OuterVolumeSpecName: "pod-info") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.799160 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb" (OuterVolumeSpecName: "kube-api-access-wwjjb") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "kube-api-access-wwjjb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.799189 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.815638 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data" (OuterVolumeSpecName: "config-data") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.836227 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.853268 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf" (OuterVolumeSpecName: "server-conf") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893865 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893907 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893917 4672 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1afcb6ce-1241-4930-8639-bee8a9a76d11-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893925 4672 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893936 4672 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1afcb6ce-1241-4930-8639-bee8a9a76d11-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893946 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893954 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893962 4672 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/1afcb6ce-1241-4930-8639-bee8a9a76d11-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893970 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.893981 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwjjb\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-kube-api-access-wwjjb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.910697 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4a0a2c2-a878-473c-86da-5a74aa392982" path="/var/lib/kubelet/pods/d4a0a2c2-a878-473c-86da-5a74aa392982/volumes" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.931357 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1afcb6ce-1241-4930-8639-bee8a9a76d11" (UID: "1afcb6ce-1241-4930-8639-bee8a9a76d11"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.940615 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.999184 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:11 crc kubenswrapper[4672]: I1007 15:12:11.999221 4672 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1afcb6ce-1241-4930-8639-bee8a9a76d11-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.107077 4672 generic.go:334] "Generic (PLEG): container finished" podID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerID="fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723" exitCode=0 Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.107125 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerDied","Data":"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723"} Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.107165 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1afcb6ce-1241-4930-8639-bee8a9a76d11","Type":"ContainerDied","Data":"3440494c5ecdc57072ba6c33aa923bf8b54b6824b1bcb73f3522fe2eba8842ce"} Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.107186 4672 scope.go:117] "RemoveContainer" containerID="fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.107301 4672 util.go:48] "No ready sandbox for pod can be found. 
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.176409 4672 scope.go:117] "RemoveContainer" containerID="00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.188107 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.219370 4672 scope.go:117] "RemoveContainer" containerID="fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723"
Oct 07 15:12:12 crc kubenswrapper[4672]: E1007 15:12:12.223007 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723\": container with ID starting with fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723 not found: ID does not exist" containerID="fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.223105 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723"} err="failed to get container status \"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723\": rpc error: code = NotFound desc = could not find container \"fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723\": container with ID starting with fd57f1156e9c9ebb3cfb970d82893e44c90dd6b28033930d871e647cb3736723 not found: ID does not exist"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.223146 4672 scope.go:117] "RemoveContainer" containerID="00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.226745 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 07 15:12:12 crc kubenswrapper[4672]: E1007 15:12:12.227266 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9\": container with ID starting with 00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9 not found: ID does not exist" containerID="00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.227861 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9"} err="failed to get container status \"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9\": rpc error: code = NotFound desc = could not find container \"00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9\": container with ID starting with 00c84ecbd1d87419677950c02a9e65a8aa7f29e7e36f885388d7b857ecaac1b9 not found: ID does not exist"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.248650 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 07 15:12:12 crc kubenswrapper[4672]: E1007 15:12:12.249054 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="setup-container"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.249068 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="setup-container"
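The NotFound errors above are a benign race, not a failure: RemoveContainer is requested twice (once from PLEG cleanup, once from the API DELETE path), and by the time the second ContainerStatus lookup runs, CRI-O has already removed the container. Callers of a CRI-style gRPC API commonly special-case this code so that "already deleted" counts as success. A minimal sketch of that pattern (the deleteIdempotent helper is hypothetical, not kubelet's actual code):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// deleteIdempotent treats NotFound from the runtime as success: the
// desired end state ("container gone") already holds, so the race
// between PLEG cleanup and the API DELETE is harmless.
func deleteIdempotent(remove func(id string) error, id string) error {
	if err := remove(id); status.Code(err) != codes.NotFound {
		return err
	}
	return nil // someone else removed it first; nothing left to do
}

func main() {
	// Simulate the race seen in the log: the runtime no longer knows the ID.
	gone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	if err := deleteIdempotent(gone, "fd57f1156e9c"); err != nil {
		fmt.Println("unexpected:", err)
	} else {
		fmt.Println("already gone: treated as success")
	}
}
```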
podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="setup-container" Oct 07 15:12:12 crc kubenswrapper[4672]: E1007 15:12:12.249097 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="rabbitmq" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.249103 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="rabbitmq" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.249274 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" containerName="rabbitmq" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.250293 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.252091 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.252396 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.268934 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-q24n8" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.269220 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.269333 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.269463 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.269582 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.277989 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341162 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341468 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341508 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341529 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341563 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341580 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341594 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/60a532ee-7772-464b-8f0b-854377647a20-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341654 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341677 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d86d4\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-kube-api-access-d86d4\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341716 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.341735 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/60a532ee-7772-464b-8f0b-854377647a20-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.371215 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446025 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0" 
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446327 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446411 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446535 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446703 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446776 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/60a532ee-7772-464b-8f0b-854377647a20-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446900 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.446981 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d86d4\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-kube-api-access-d86d4\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.447119 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.447204 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/60a532ee-7772-464b-8f0b-854377647a20-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.447285 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.450111 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.450446 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.450584 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.450916 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.451310 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/60a532ee-7772-464b-8f0b-854377647a20-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.451617 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.452239 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/60a532ee-7772-464b-8f0b-854377647a20-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.462949 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.463593 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/60a532ee-7772-464b-8f0b-854377647a20-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.473901 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d86d4\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-kube-api-access-d86d4\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.474421 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/60a532ee-7772-464b-8f0b-854377647a20-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.487781 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"60a532ee-7772-464b-8f0b-854377647a20\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:12 crc kubenswrapper[4672]: I1007 15:12:12.575404 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 07 15:12:13 crc kubenswrapper[4672]: I1007 15:12:13.043792 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 07 15:12:13 crc kubenswrapper[4672]: W1007 15:12:13.049185 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60a532ee_7772_464b_8f0b_854377647a20.slice/crio-c8b0105e166bfdf080c2efcc2232471d523a5be1b97bcdf115f94713dec2766c WatchSource:0}: Error finding container c8b0105e166bfdf080c2efcc2232471d523a5be1b97bcdf115f94713dec2766c: Status 404 returned error can't find the container with id c8b0105e166bfdf080c2efcc2232471d523a5be1b97bcdf115f94713dec2766c
Oct 07 15:12:13 crc kubenswrapper[4672]: I1007 15:12:13.117093 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6915154f-a1f8-4f93-8a8b-00020a761f95","Type":"ContainerStarted","Data":"fc839f2582339eb977f8df3e34bbfaeca03a01071cf8af7840694df3d4afb803"}
Oct 07 15:12:13 crc kubenswrapper[4672]: I1007 15:12:13.119131 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"60a532ee-7772-464b-8f0b-854377647a20","Type":"ContainerStarted","Data":"c8b0105e166bfdf080c2efcc2232471d523a5be1b97bcdf115f94713dec2766c"}
Oct 07 15:12:13 crc kubenswrapper[4672]: I1007 15:12:13.902725 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1afcb6ce-1241-4930-8639-bee8a9a76d11" path="/var/lib/kubelet/pods/1afcb6ce-1241-4930-8639-bee8a9a76d11/volumes"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.129778 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6915154f-a1f8-4f93-8a8b-00020a761f95","Type":"ContainerStarted","Data":"a1cd2d491a5141658b6ec6cc8a97bb402c7fb55cc046c620b91db7d771fbf721"}
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.595578 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fc8cdc75-hwt9l"]
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.597072 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.605434 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.628968 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fc8cdc75-hwt9l"]
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685584 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685648 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685787 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685831 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685895 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bxzh\" (UniqueName: \"kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.685984 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.686051 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.692658 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc8cdc75-hwt9l"]
Oct 07 15:12:14 crc kubenswrapper[4672]: E1007 15:12:14.693871 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-4bxzh openstack-edpm-ipam ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l" podUID="7b39b5de-1c42-47b1-8987-2c082ff4b5d8"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.746854 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-599dd5cb59-whmjd"]
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.749843 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.783041 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-599dd5cb59-whmjd"]
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788265 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788325 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788369 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-config\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788401 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788431 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788458 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788487 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788562 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-svc\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788614 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788641 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l58n8\" (UniqueName: \"kubernetes.io/projected/2645864b-ab57-47a0-8b17-478a93a55a7a-kube-api-access-l58n8\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788678 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788712 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788756 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-openstack-edpm-ipam\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.788797 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bxzh\" (UniqueName: \"kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.790569 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.791312 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
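The "Error syncing pod, skipping ... context canceled" entry above is the pod worker giving up on dnsmasq-dns-fc8cdc75-hwt9l: the pod was deleted (SyncLoop DELETE) while its seven volumes were still being mounted, so the worker's context was canceled mid-sync and the remaining mounts were abandoned and reported as unmounted. The underlying mechanism is ordinary Go context plumbing; a compact sketch under that assumption (the syncPod function and its volume list are stand-ins, not kubelet's real worker):

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// syncPod mounts each volume but checks for cancellation between steps,
// so a deletion that lands mid-sync aborts the rest, as in the log.
func syncPod(ctx context.Context, volumes []string) error {
	var unmounted []string
	for _, v := range volumes {
		select {
		case <-ctx.Done():
			unmounted = append(unmounted, v) // record what never got mounted
			continue
		default:
		}
		time.Sleep(10 * time.Millisecond) // stand-in for the real mount work
	}
	if len(unmounted) > 0 {
		return fmt.Errorf("unmounted volumes=%v: %w", unmounted, ctx.Err())
	}
	return nil
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	go func() { // simulate the API DELETE arriving mid-sync
		time.Sleep(15 * time.Millisecond)
		cancel()
	}()
	err := syncPod(ctx, []string{"config", "dns-svc", "ovsdbserver-nb", "ovsdbserver-sb"})
	fmt.Println("Error syncing pod, skipping:", err)
}
```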
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.792130 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.792414 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.792873 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.793392 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.827445 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bxzh\" (UniqueName: \"kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh\") pod \"dnsmasq-dns-fc8cdc75-hwt9l\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889553 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-config\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889617 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889650 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889725 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-svc\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889770 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l58n8\" (UniqueName: \"kubernetes.io/projected/2645864b-ab57-47a0-8b17-478a93a55a7a-kube-api-access-l58n8\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889809 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.889842 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-openstack-edpm-ipam\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.891226 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-openstack-edpm-ipam\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.891240 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-svc\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.891315 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-config\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.891439 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.891956 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.892010 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2645864b-ab57-47a0-8b17-478a93a55a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:14 crc kubenswrapper[4672]: I1007 15:12:14.907989 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l58n8\" (UniqueName: \"kubernetes.io/projected/2645864b-ab57-47a0-8b17-478a93a55a7a-kube-api-access-l58n8\") pod \"dnsmasq-dns-599dd5cb59-whmjd\" (UID: \"2645864b-ab57-47a0-8b17-478a93a55a7a\") " pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.080704 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.146976 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"60a532ee-7772-464b-8f0b-854377647a20","Type":"ContainerStarted","Data":"23310f37f4906beb87f3b35105b8fb2ba22324b1afef4c6f2a9e35f843d0cc4a"}
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.147008 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.202413 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc8cdc75-hwt9l"
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295708 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295894 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295925 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295944 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295958 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.295986 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bxzh\" (UniqueName: \"kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296047 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config\") pod \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\" (UID: \"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") "
\"7b39b5de-1c42-47b1-8987-2c082ff4b5d8\") " Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296437 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296505 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296663 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296826 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config" (OuterVolumeSpecName: "config") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.296841 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.297540 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298419 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298446 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298459 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298469 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298481 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.298493 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.301440 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh" (OuterVolumeSpecName: "kube-api-access-4bxzh") pod "7b39b5de-1c42-47b1-8987-2c082ff4b5d8" (UID: "7b39b5de-1c42-47b1-8987-2c082ff4b5d8"). InnerVolumeSpecName "kube-api-access-4bxzh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.399906 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bxzh\" (UniqueName: \"kubernetes.io/projected/7b39b5de-1c42-47b1-8987-2c082ff4b5d8-kube-api-access-4bxzh\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:15 crc kubenswrapper[4672]: I1007 15:12:15.538477 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-599dd5cb59-whmjd"] Oct 07 15:12:15 crc kubenswrapper[4672]: W1007 15:12:15.539330 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2645864b_ab57_47a0_8b17_478a93a55a7a.slice/crio-6214d7de0c9903b04079b640da4b5062b6a9d58577fd3e5e4d0a31932400b747 WatchSource:0}: Error finding container 6214d7de0c9903b04079b640da4b5062b6a9d58577fd3e5e4d0a31932400b747: Status 404 returned error can't find the container with id 6214d7de0c9903b04079b640da4b5062b6a9d58577fd3e5e4d0a31932400b747 Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.161421 4672 generic.go:334] "Generic (PLEG): container finished" podID="2645864b-ab57-47a0-8b17-478a93a55a7a" containerID="39e1a7b1c0e588702e85e37e3a95c67b19451b1ab55bcf975e595fd52bd77587" exitCode=0 Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.161538 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd" event={"ID":"2645864b-ab57-47a0-8b17-478a93a55a7a","Type":"ContainerDied","Data":"39e1a7b1c0e588702e85e37e3a95c67b19451b1ab55bcf975e595fd52bd77587"} Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.161800 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd" event={"ID":"2645864b-ab57-47a0-8b17-478a93a55a7a","Type":"ContainerStarted","Data":"6214d7de0c9903b04079b640da4b5062b6a9d58577fd3e5e4d0a31932400b747"} Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.161828 4672 util.go:30] "No sandbox for pod can be found. 
Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.247252 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc8cdc75-hwt9l"]
Oct 07 15:12:16 crc kubenswrapper[4672]: I1007 15:12:16.262405 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fc8cdc75-hwt9l"]
Oct 07 15:12:17 crc kubenswrapper[4672]: I1007 15:12:17.173946 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd" event={"ID":"2645864b-ab57-47a0-8b17-478a93a55a7a","Type":"ContainerStarted","Data":"870b7081a50f0cbd2e00e48b0296013b953589bed7a0b845d7455dcfc41aaed4"}
Oct 07 15:12:17 crc kubenswrapper[4672]: I1007 15:12:17.174433 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:17 crc kubenswrapper[4672]: I1007 15:12:17.201059 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd" podStartSLOduration=3.201011443 podStartE2EDuration="3.201011443s" podCreationTimestamp="2025-10-07 15:12:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:12:17.191359652 +0000 UTC m=+1414.166538253" watchObservedRunningTime="2025-10-07 15:12:17.201011443 +0000 UTC m=+1414.176190024"
Oct 07 15:12:17 crc kubenswrapper[4672]: I1007 15:12:17.903352 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b39b5de-1c42-47b1-8987-2c082ff4b5d8" path="/var/lib/kubelet/pods/7b39b5de-1c42-47b1-8987-2c082ff4b5d8/volumes"
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.088180 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-599dd5cb59-whmjd"
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.190567 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"]
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.190841 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="dnsmasq-dns" containerID="cri-o://5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02" gracePeriod=10
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.720642 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c"
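The startup-latency entry above reports podStartSLOduration=3.201011443: the gap between podCreationTimestamp (15:12:14) and the watch-observed running time (15:12:17.201011443). The pull timestamps are the zero time because no image pull was needed. The same number can be recomputed from the logged wall-clock timestamps; a small sketch using Go's time parsing, with the layout chosen to match the "+0000 UTC" format printed in the log:

```go
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05 -0700 MST" // matches "2025-10-07 15:12:14 +0000 UTC"

func main() {
	created, _ := time.Parse(layout, "2025-10-07 15:12:14 +0000 UTC")
	// The log's running timestamp also carries a monotonic suffix
	// (m=+1414...); keep only the wall-clock part before parsing.
	running, _ := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST",
		"2025-10-07 15:12:17.201011443 +0000 UTC")
	fmt.Println(running.Sub(created).Seconds()) // 3.201011443, the podStartSLOduration
}
```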
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.816604 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6jkk\" (UniqueName: \"kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.816919 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.817116 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.818052 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.818123 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.818226 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc\") pod \"b4c79046-59e3-414e-ac5a-0bb51f43187c\" (UID: \"b4c79046-59e3-414e-ac5a-0bb51f43187c\") "
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.825593 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk" (OuterVolumeSpecName: "kube-api-access-w6jkk") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "kube-api-access-w6jkk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.872355 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.872376 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config" (OuterVolumeSpecName: "config") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
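A few entries back, the old dnsmasq pod's container was killed "with a grace period" (gracePeriod=10): SIGTERM first, up to ten seconds for a clean exit, then escalation to SIGKILL. In this capture the container exits 0 within the window, which is why the subsequent "container finished" entry shows exitCode=0 rather than a kill. The general shape of that pattern, sketched for a locally exec'd process (illustrative only; in Kubernetes the signalling happens on the CRI-O side):

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to grace for a clean exit,
// then escalates to SIGKILL, mirroring gracePeriod=10 in the log.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited cleanly within grace period")
	case <-time.After(grace):
		cmd.Process.Kill() // escalate to SIGKILL
		<-done
		fmt.Println("grace period expired; killed")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 10*time.Second)
}
```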
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.872564 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.878975 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.880379 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b4c79046-59e3-414e-ac5a-0bb51f43187c" (UID: "b4c79046-59e3-414e-ac5a-0bb51f43187c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921366 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6jkk\" (UniqueName: \"kubernetes.io/projected/b4c79046-59e3-414e-ac5a-0bb51f43187c-kube-api-access-w6jkk\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921408 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921421 4672 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921432 4672 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921446 4672 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:25 crc kubenswrapper[4672]: I1007 15:12:25.921458 4672 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4c79046-59e3-414e-ac5a-0bb51f43187c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.252985 4672 generic.go:334] "Generic (PLEG): container finished" podID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerID="5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02" exitCode=0 Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.253087 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.254179 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" event={"ID":"b4c79046-59e3-414e-ac5a-0bb51f43187c","Type":"ContainerDied","Data":"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02"} Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.254313 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c66bcfb47-hzd7c" event={"ID":"b4c79046-59e3-414e-ac5a-0bb51f43187c","Type":"ContainerDied","Data":"17426234c7e7a7ed11ee8e9f2e30df192ae897b8a05a5bade545ee518bb43ad1"} Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.254394 4672 scope.go:117] "RemoveContainer" containerID="5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.284745 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"] Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.294438 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c66bcfb47-hzd7c"] Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.296075 4672 scope.go:117] "RemoveContainer" containerID="9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.324380 4672 scope.go:117] "RemoveContainer" containerID="5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02" Oct 07 15:12:26 crc kubenswrapper[4672]: E1007 15:12:26.326310 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02\": container with ID starting with 5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02 not found: ID does not exist" containerID="5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.326346 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02"} err="failed to get container status \"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02\": rpc error: code = NotFound desc = could not find container \"5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02\": container with ID starting with 5a93764342180da667cc1b28cc6c45cde763d9bd2fd2752c18ab990d427fde02 not found: ID does not exist" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.326370 4672 scope.go:117] "RemoveContainer" containerID="9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47" Oct 07 15:12:26 crc kubenswrapper[4672]: E1007 15:12:26.326689 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47\": container with ID starting with 9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47 not found: ID does not exist" containerID="9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47" Oct 07 15:12:26 crc kubenswrapper[4672]: I1007 15:12:26.326719 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47"} err="failed to get container status 
\"9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47\": rpc error: code = NotFound desc = could not find container \"9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47\": container with ID starting with 9c6f1743bc81d978855ed314c53bcd8500af677ca6db3dfb1e15833ad49c5b47 not found: ID does not exist" Oct 07 15:12:27 crc kubenswrapper[4672]: I1007 15:12:27.903484 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" path="/var/lib/kubelet/pods/b4c79046-59e3-414e-ac5a-0bb51f43187c/volumes" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.669528 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:12:32 crc kubenswrapper[4672]: E1007 15:12:32.670654 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="init" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.670674 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="init" Oct 07 15:12:32 crc kubenswrapper[4672]: E1007 15:12:32.670717 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="dnsmasq-dns" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.670726 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="dnsmasq-dns" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.671004 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c79046-59e3-414e-ac5a-0bb51f43187c" containerName="dnsmasq-dns" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.678318 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.681856 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.750971 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.751355 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w9lb\" (UniqueName: \"kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.751698 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.853535 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w9lb\" (UniqueName: \"kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.854107 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.854625 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.854826 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.855183 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:32 crc kubenswrapper[4672]: I1007 15:12:32.874804 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9w9lb\" (UniqueName: \"kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb\") pod \"redhat-operators-crw4m\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:33 crc kubenswrapper[4672]: I1007 15:12:33.007936 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.759996 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w"] Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.761710 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.766125 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.766344 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.766373 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.766668 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.775297 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w"] Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.874042 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.874138 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qwrb\" (UniqueName: \"kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.874194 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.874216 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.975979 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qwrb\" (UniqueName: \"kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.976049 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.976096 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.976220 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.982242 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.982749 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.983686 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:33.994591 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qwrb\" (UniqueName: \"kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" 
Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:34.087190 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:34 crc kubenswrapper[4672]: W1007 15:12:34.885574 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfe31691_5af9_4d45_a35c_58f41e7dbb60.slice/crio-7834500cef88559d39b4a912f6717cea43fe10fb4611a049bad67cd06a1e4e67 WatchSource:0}: Error finding container 7834500cef88559d39b4a912f6717cea43fe10fb4611a049bad67cd06a1e4e67: Status 404 returned error can't find the container with id 7834500cef88559d39b4a912f6717cea43fe10fb4611a049bad67cd06a1e4e67 Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:34.886165 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:34.970701 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w"] Oct 07 15:12:34 crc kubenswrapper[4672]: W1007 15:12:34.975219 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa79d159_5cc6_405e_8725_d3f49e9f75f8.slice/crio-a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a WatchSource:0}: Error finding container a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a: Status 404 returned error can't find the container with id a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a Oct 07 15:12:34 crc kubenswrapper[4672]: I1007 15:12:34.977551 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:12:35 crc kubenswrapper[4672]: I1007 15:12:35.332325 4672 generic.go:334] "Generic (PLEG): container finished" podID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerID="f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9" exitCode=0 Oct 07 15:12:35 crc kubenswrapper[4672]: I1007 15:12:35.332421 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerDied","Data":"f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9"} Oct 07 15:12:35 crc kubenswrapper[4672]: I1007 15:12:35.332467 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerStarted","Data":"7834500cef88559d39b4a912f6717cea43fe10fb4611a049bad67cd06a1e4e67"} Oct 07 15:12:35 crc kubenswrapper[4672]: I1007 15:12:35.333438 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" event={"ID":"fa79d159-5cc6-405e-8725-d3f49e9f75f8","Type":"ContainerStarted","Data":"a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a"} Oct 07 15:12:35 crc kubenswrapper[4672]: E1007 15:12:35.374657 4672 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfe31691_5af9_4d45_a35c_58f41e7dbb60.slice/crio-conmon-f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfe31691_5af9_4d45_a35c_58f41e7dbb60.slice/crio-f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9.scope\": RecentStats: unable to find data in memory cache]" Oct 07 15:12:37 crc kubenswrapper[4672]: I1007 15:12:37.357926 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerStarted","Data":"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8"} Oct 07 15:12:38 crc kubenswrapper[4672]: I1007 15:12:38.368665 4672 generic.go:334] "Generic (PLEG): container finished" podID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerID="b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8" exitCode=0 Oct 07 15:12:38 crc kubenswrapper[4672]: I1007 15:12:38.370101 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerDied","Data":"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8"} Oct 07 15:12:43 crc kubenswrapper[4672]: I1007 15:12:43.877590 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:12:44 crc kubenswrapper[4672]: I1007 15:12:44.424459 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerStarted","Data":"21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be"} Oct 07 15:12:44 crc kubenswrapper[4672]: I1007 15:12:44.426591 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" event={"ID":"fa79d159-5cc6-405e-8725-d3f49e9f75f8","Type":"ContainerStarted","Data":"98220fa871c21248fb06c9ebc0975609079ca640af49dd453e6a46e33fc6e6a1"} Oct 07 15:12:44 crc kubenswrapper[4672]: I1007 15:12:44.448556 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-crw4m" podStartSLOduration=3.925319 podStartE2EDuration="12.448537613s" podCreationTimestamp="2025-10-07 15:12:32 +0000 UTC" firstStartedPulling="2025-10-07 15:12:35.33421471 +0000 UTC m=+1432.309393291" lastFinishedPulling="2025-10-07 15:12:43.857433323 +0000 UTC m=+1440.832611904" observedRunningTime="2025-10-07 15:12:44.440809048 +0000 UTC m=+1441.415987629" watchObservedRunningTime="2025-10-07 15:12:44.448537613 +0000 UTC m=+1441.423716194" Oct 07 15:12:44 crc kubenswrapper[4672]: I1007 15:12:44.463442 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" podStartSLOduration=2.565463963 podStartE2EDuration="11.463422906s" podCreationTimestamp="2025-10-07 15:12:33 +0000 UTC" firstStartedPulling="2025-10-07 15:12:34.977247787 +0000 UTC m=+1431.952426368" lastFinishedPulling="2025-10-07 15:12:43.87520673 +0000 UTC m=+1440.850385311" observedRunningTime="2025-10-07 15:12:44.461301364 +0000 UTC m=+1441.436479955" watchObservedRunningTime="2025-10-07 15:12:44.463422906 +0000 UTC m=+1441.438601487" Oct 07 15:12:46 crc kubenswrapper[4672]: I1007 15:12:46.452567 4672 generic.go:334] "Generic (PLEG): container finished" podID="6915154f-a1f8-4f93-8a8b-00020a761f95" containerID="a1cd2d491a5141658b6ec6cc8a97bb402c7fb55cc046c620b91db7d771fbf721" exitCode=0 Oct 07 15:12:46 crc kubenswrapper[4672]: I1007 
15:12:46.452641 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6915154f-a1f8-4f93-8a8b-00020a761f95","Type":"ContainerDied","Data":"a1cd2d491a5141658b6ec6cc8a97bb402c7fb55cc046c620b91db7d771fbf721"} Oct 07 15:12:47 crc kubenswrapper[4672]: I1007 15:12:47.464965 4672 generic.go:334] "Generic (PLEG): container finished" podID="60a532ee-7772-464b-8f0b-854377647a20" containerID="23310f37f4906beb87f3b35105b8fb2ba22324b1afef4c6f2a9e35f843d0cc4a" exitCode=0 Oct 07 15:12:47 crc kubenswrapper[4672]: I1007 15:12:47.465092 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"60a532ee-7772-464b-8f0b-854377647a20","Type":"ContainerDied","Data":"23310f37f4906beb87f3b35105b8fb2ba22324b1afef4c6f2a9e35f843d0cc4a"} Oct 07 15:12:47 crc kubenswrapper[4672]: I1007 15:12:47.468318 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6915154f-a1f8-4f93-8a8b-00020a761f95","Type":"ContainerStarted","Data":"d054b1c3405443c41066c828266b1c1b9db2ed4e45a1838dae95857baf9dcb35"} Oct 07 15:12:47 crc kubenswrapper[4672]: I1007 15:12:47.468611 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 15:12:47 crc kubenswrapper[4672]: I1007 15:12:47.523693 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.523676905 podStartE2EDuration="36.523676905s" podCreationTimestamp="2025-10-07 15:12:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:12:47.521166552 +0000 UTC m=+1444.496345153" watchObservedRunningTime="2025-10-07 15:12:47.523676905 +0000 UTC m=+1444.498855486" Oct 07 15:12:48 crc kubenswrapper[4672]: I1007 15:12:48.480520 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"60a532ee-7772-464b-8f0b-854377647a20","Type":"ContainerStarted","Data":"1e4d7f1890321168eb8ffe4fc0718a843a4334d06060d925eeec7b202d103a93"} Oct 07 15:12:48 crc kubenswrapper[4672]: I1007 15:12:48.480905 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:12:48 crc kubenswrapper[4672]: I1007 15:12:48.509442 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.509424085 podStartE2EDuration="36.509424085s" podCreationTimestamp="2025-10-07 15:12:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:12:48.506987154 +0000 UTC m=+1445.482165755" watchObservedRunningTime="2025-10-07 15:12:48.509424085 +0000 UTC m=+1445.484602666" Oct 07 15:12:53 crc kubenswrapper[4672]: I1007 15:12:53.009119 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:53 crc kubenswrapper[4672]: I1007 15:12:53.010837 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:12:54 crc kubenswrapper[4672]: I1007 15:12:54.065580 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-crw4m" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" probeResult="failure" output=< Oct 07 
15:12:54 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:12:54 crc kubenswrapper[4672]: > Oct 07 15:12:55 crc kubenswrapper[4672]: I1007 15:12:55.548561 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa79d159-5cc6-405e-8725-d3f49e9f75f8" containerID="98220fa871c21248fb06c9ebc0975609079ca640af49dd453e6a46e33fc6e6a1" exitCode=0 Oct 07 15:12:55 crc kubenswrapper[4672]: I1007 15:12:55.548743 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" event={"ID":"fa79d159-5cc6-405e-8725-d3f49e9f75f8","Type":"ContainerDied","Data":"98220fa871c21248fb06c9ebc0975609079ca640af49dd453e6a46e33fc6e6a1"} Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.002250 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.156740 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory\") pod \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.156857 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle\") pod \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.156877 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key\") pod \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.156960 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qwrb\" (UniqueName: \"kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb\") pod \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\" (UID: \"fa79d159-5cc6-405e-8725-d3f49e9f75f8\") " Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.161622 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb" (OuterVolumeSpecName: "kube-api-access-6qwrb") pod "fa79d159-5cc6-405e-8725-d3f49e9f75f8" (UID: "fa79d159-5cc6-405e-8725-d3f49e9f75f8"). InnerVolumeSpecName "kube-api-access-6qwrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.162513 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "fa79d159-5cc6-405e-8725-d3f49e9f75f8" (UID: "fa79d159-5cc6-405e-8725-d3f49e9f75f8"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.185490 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory" (OuterVolumeSpecName: "inventory") pod "fa79d159-5cc6-405e-8725-d3f49e9f75f8" (UID: "fa79d159-5cc6-405e-8725-d3f49e9f75f8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.186537 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa79d159-5cc6-405e-8725-d3f49e9f75f8" (UID: "fa79d159-5cc6-405e-8725-d3f49e9f75f8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.259084 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.259113 4672 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.259124 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa79d159-5cc6-405e-8725-d3f49e9f75f8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.259133 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qwrb\" (UniqueName: \"kubernetes.io/projected/fa79d159-5cc6-405e-8725-d3f49e9f75f8-kube-api-access-6qwrb\") on node \"crc\" DevicePath \"\"" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.568335 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" event={"ID":"fa79d159-5cc6-405e-8725-d3f49e9f75f8","Type":"ContainerDied","Data":"a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a"} Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.568657 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a13692bde09ffb1550ece78cc8e370e2b400a35dfb48783ad06c7249d39df72a" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.568377 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.652964 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s"] Oct 07 15:12:57 crc kubenswrapper[4672]: E1007 15:12:57.653356 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa79d159-5cc6-405e-8725-d3f49e9f75f8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.653378 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa79d159-5cc6-405e-8725-d3f49e9f75f8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.653609 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa79d159-5cc6-405e-8725-d3f49e9f75f8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.654212 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.655992 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.656426 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.656552 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.664313 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s"] Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.672271 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.768810 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.768885 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.769076 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2www\" (UniqueName: \"kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.871173 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-b2www\" (UniqueName: \"kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.871237 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.871301 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.884610 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.884610 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.887195 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2www\" (UniqueName: \"kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lrt9s\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:57 crc kubenswrapper[4672]: I1007 15:12:57.973199 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:12:58 crc kubenswrapper[4672]: I1007 15:12:58.496318 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s"] Oct 07 15:12:58 crc kubenswrapper[4672]: I1007 15:12:58.579181 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" event={"ID":"1861ff34-6944-4cea-950f-8efc95e05f1a","Type":"ContainerStarted","Data":"a0b3da43679ccf4020f2070c3fc2d38fbac020b0236365a0097fbdfd22c3578d"} Oct 07 15:13:00 crc kubenswrapper[4672]: I1007 15:13:00.603394 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" event={"ID":"1861ff34-6944-4cea-950f-8efc95e05f1a","Type":"ContainerStarted","Data":"4383c6f430e3a14fc0642863d2eaa328787080f3948c4a8a7bff9c47f3247033"} Oct 07 15:13:00 crc kubenswrapper[4672]: I1007 15:13:00.622996 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" podStartSLOduration=2.758470291 podStartE2EDuration="3.62297328s" podCreationTimestamp="2025-10-07 15:12:57 +0000 UTC" firstStartedPulling="2025-10-07 15:12:58.5051343 +0000 UTC m=+1455.480312881" lastFinishedPulling="2025-10-07 15:12:59.369637289 +0000 UTC m=+1456.344815870" observedRunningTime="2025-10-07 15:13:00.616885513 +0000 UTC m=+1457.592064094" watchObservedRunningTime="2025-10-07 15:13:00.62297328 +0000 UTC m=+1457.598151861" Oct 07 15:13:01 crc kubenswrapper[4672]: I1007 15:13:01.839221 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 07 15:13:02 crc kubenswrapper[4672]: I1007 15:13:02.579552 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 15:13:02 crc kubenswrapper[4672]: I1007 15:13:02.623623 4672 generic.go:334] "Generic (PLEG): container finished" podID="1861ff34-6944-4cea-950f-8efc95e05f1a" containerID="4383c6f430e3a14fc0642863d2eaa328787080f3948c4a8a7bff9c47f3247033" exitCode=0 Oct 07 15:13:02 crc kubenswrapper[4672]: I1007 15:13:02.623664 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" event={"ID":"1861ff34-6944-4cea-950f-8efc95e05f1a","Type":"ContainerDied","Data":"4383c6f430e3a14fc0642863d2eaa328787080f3948c4a8a7bff9c47f3247033"} Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.057209 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-crw4m" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" probeResult="failure" output=< Oct 07 15:13:04 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:13:04 crc kubenswrapper[4672]: > Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.078942 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.192237 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory\") pod \"1861ff34-6944-4cea-950f-8efc95e05f1a\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.192559 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2www\" (UniqueName: \"kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www\") pod \"1861ff34-6944-4cea-950f-8efc95e05f1a\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.192739 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key\") pod \"1861ff34-6944-4cea-950f-8efc95e05f1a\" (UID: \"1861ff34-6944-4cea-950f-8efc95e05f1a\") " Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.197897 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www" (OuterVolumeSpecName: "kube-api-access-b2www") pod "1861ff34-6944-4cea-950f-8efc95e05f1a" (UID: "1861ff34-6944-4cea-950f-8efc95e05f1a"). InnerVolumeSpecName "kube-api-access-b2www". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.220009 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory" (OuterVolumeSpecName: "inventory") pod "1861ff34-6944-4cea-950f-8efc95e05f1a" (UID: "1861ff34-6944-4cea-950f-8efc95e05f1a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.224189 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1861ff34-6944-4cea-950f-8efc95e05f1a" (UID: "1861ff34-6944-4cea-950f-8efc95e05f1a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.295270 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.295319 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2www\" (UniqueName: \"kubernetes.io/projected/1861ff34-6944-4cea-950f-8efc95e05f1a-kube-api-access-b2www\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.295332 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1861ff34-6944-4cea-950f-8efc95e05f1a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.643398 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" event={"ID":"1861ff34-6944-4cea-950f-8efc95e05f1a","Type":"ContainerDied","Data":"a0b3da43679ccf4020f2070c3fc2d38fbac020b0236365a0097fbdfd22c3578d"} Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.643440 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0b3da43679ccf4020f2070c3fc2d38fbac020b0236365a0097fbdfd22c3578d" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.643471 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lrt9s" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.705466 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w"] Oct 07 15:13:04 crc kubenswrapper[4672]: E1007 15:13:04.705917 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1861ff34-6944-4cea-950f-8efc95e05f1a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.705941 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1861ff34-6944-4cea-950f-8efc95e05f1a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.706150 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1861ff34-6944-4cea-950f-8efc95e05f1a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.707097 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.708698 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.710364 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.710395 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.710418 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.729800 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w"] Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.803177 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhglx\" (UniqueName: \"kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.803232 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.803425 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.803504 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.904625 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.904681 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.904784 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhglx\" (UniqueName: \"kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.904816 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.908232 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.908936 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.912303 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:04 crc kubenswrapper[4672]: I1007 15:13:04.919569 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhglx\" (UniqueName: \"kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:05 crc kubenswrapper[4672]: I1007 15:13:05.027114 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:13:05 crc kubenswrapper[4672]: I1007 15:13:05.559565 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w"] Oct 07 15:13:05 crc kubenswrapper[4672]: I1007 15:13:05.652637 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" event={"ID":"4a1d79b4-a176-48af-9c78-59c7ddd39b71","Type":"ContainerStarted","Data":"d392a7801b41de9fecc2b9fc50a11e3df33bfcf0806b49529aedff7e16a968fb"} Oct 07 15:13:06 crc kubenswrapper[4672]: I1007 15:13:06.664449 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" event={"ID":"4a1d79b4-a176-48af-9c78-59c7ddd39b71","Type":"ContainerStarted","Data":"6c489d452e0126b7666f7a353beee16d307cde07a04b92d59b33212b1e102a15"} Oct 07 15:13:06 crc kubenswrapper[4672]: I1007 15:13:06.693655 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" podStartSLOduration=2.26235585 podStartE2EDuration="2.693634676s" podCreationTimestamp="2025-10-07 15:13:04 +0000 UTC" firstStartedPulling="2025-10-07 15:13:05.561718072 +0000 UTC m=+1462.536896653" lastFinishedPulling="2025-10-07 15:13:05.992996898 +0000 UTC m=+1462.968175479" observedRunningTime="2025-10-07 15:13:06.680062981 +0000 UTC m=+1463.655241592" watchObservedRunningTime="2025-10-07 15:13:06.693634676 +0000 UTC m=+1463.668813257" Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.022464 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-skvdm"] Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.026324 4672 util.go:30] "No sandbox for pod can be found. 
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.063760 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skvdm"]
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.145137 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.145192 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.145275 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc7lx\" (UniqueName: \"kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.247169 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc7lx\" (UniqueName: \"kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.247316 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.247343 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.247912 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.248498 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.273826 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc7lx\" (UniqueName: \"kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx\") pod \"community-operators-skvdm\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.358546 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skvdm"
Oct 07 15:13:07 crc kubenswrapper[4672]: I1007 15:13:07.933233 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skvdm"]
Oct 07 15:13:07 crc kubenswrapper[4672]: W1007 15:13:07.936208 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc744931b_5638_4b92_b1d5_2f96e59133f9.slice/crio-0e0c3d40125e30d2e963bb5e312f452c65a5291766e64590e81508a652765b0f WatchSource:0}: Error finding container 0e0c3d40125e30d2e963bb5e312f452c65a5291766e64590e81508a652765b0f: Status 404 returned error can't find the container with id 0e0c3d40125e30d2e963bb5e312f452c65a5291766e64590e81508a652765b0f
Oct 07 15:13:08 crc kubenswrapper[4672]: I1007 15:13:08.680558 4672 generic.go:334] "Generic (PLEG): container finished" podID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerID="a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8" exitCode=0
Oct 07 15:13:08 crc kubenswrapper[4672]: I1007 15:13:08.680816 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerDied","Data":"a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8"}
Oct 07 15:13:08 crc kubenswrapper[4672]: I1007 15:13:08.680841 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerStarted","Data":"0e0c3d40125e30d2e963bb5e312f452c65a5291766e64590e81508a652765b0f"}
Oct 07 15:13:09 crc kubenswrapper[4672]: I1007 15:13:09.693341 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerStarted","Data":"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353"}
Oct 07 15:13:10 crc kubenswrapper[4672]: I1007 15:13:10.704278 4672 generic.go:334] "Generic (PLEG): container finished" podID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerID="8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353" exitCode=0
Oct 07 15:13:10 crc kubenswrapper[4672]: I1007 15:13:10.704358 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerDied","Data":"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353"}
Oct 07 15:13:11 crc kubenswrapper[4672]: I1007 15:13:11.716317 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerStarted","Data":"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c"}
Oct 07 15:13:11 crc kubenswrapper[4672]: I1007 15:13:11.738158 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-skvdm" podStartSLOduration=2.230393635 podStartE2EDuration="4.738133667s" podCreationTimestamp="2025-10-07 15:13:07 +0000 UTC" firstStartedPulling="2025-10-07 15:13:08.682879924 +0000 UTC m=+1465.658058505" lastFinishedPulling="2025-10-07 15:13:11.190619956 +0000 UTC m=+1468.165798537" observedRunningTime="2025-10-07 15:13:11.733235544 +0000 UTC m=+1468.708414125" watchObservedRunningTime="2025-10-07 15:13:11.738133667 +0000 UTC m=+1468.713312258"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.058666 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-crw4m" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" probeResult="failure" output=<
Oct 07 15:13:14 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s
Oct 07 15:13:14 crc kubenswrapper[4672]: >
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.405210 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"]
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.407324 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.419348 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"]
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.517681 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.517875 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.517913 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9crdv\" (UniqueName: \"kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.618901 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.618959 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9crdv\" (UniqueName: \"kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.618997 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.628487 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.634402 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.647578 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9crdv\" (UniqueName: \"kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv\") pod \"redhat-marketplace-dqb96\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") " pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:14 crc kubenswrapper[4672]: I1007 15:13:14.727668 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:15 crc kubenswrapper[4672]: I1007 15:13:15.212149 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"]
Oct 07 15:13:15 crc kubenswrapper[4672]: W1007 15:13:15.215935 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9c0198c_00ec_42af_992f_7d903bac3889.slice/crio-0e1b00e28de12b43c937bba21c9623a548952390f8de78156881810387320f8d WatchSource:0}: Error finding container 0e1b00e28de12b43c937bba21c9623a548952390f8de78156881810387320f8d: Status 404 returned error can't find the container with id 0e1b00e28de12b43c937bba21c9623a548952390f8de78156881810387320f8d
Oct 07 15:13:15 crc kubenswrapper[4672]: I1007 15:13:15.753888 4672 generic.go:334] "Generic (PLEG): container finished" podID="f9c0198c-00ec-42af-992f-7d903bac3889" containerID="7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a" exitCode=0
Oct 07 15:13:15 crc kubenswrapper[4672]: I1007 15:13:15.753929 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerDied","Data":"7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a"}
Oct 07 15:13:15 crc kubenswrapper[4672]: I1007 15:13:15.753953 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerStarted","Data":"0e1b00e28de12b43c937bba21c9623a548952390f8de78156881810387320f8d"}
Oct 07 15:13:16 crc kubenswrapper[4672]: I1007 15:13:16.764368 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerStarted","Data":"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b"}
event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerStarted","Data":"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b"} Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.359216 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.359361 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.411740 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.775857 4672 generic.go:334] "Generic (PLEG): container finished" podID="f9c0198c-00ec-42af-992f-7d903bac3889" containerID="3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b" exitCode=0 Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.775957 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerDied","Data":"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b"} Oct 07 15:13:17 crc kubenswrapper[4672]: I1007 15:13:17.831729 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:18 crc kubenswrapper[4672]: I1007 15:13:18.788493 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerStarted","Data":"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900"} Oct 07 15:13:18 crc kubenswrapper[4672]: I1007 15:13:18.806053 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dqb96" podStartSLOduration=2.337204009 podStartE2EDuration="4.806035298s" podCreationTimestamp="2025-10-07 15:13:14 +0000 UTC" firstStartedPulling="2025-10-07 15:13:15.755807431 +0000 UTC m=+1472.730986012" lastFinishedPulling="2025-10-07 15:13:18.22463872 +0000 UTC m=+1475.199817301" observedRunningTime="2025-10-07 15:13:18.804886554 +0000 UTC m=+1475.780065155" watchObservedRunningTime="2025-10-07 15:13:18.806035298 +0000 UTC m=+1475.781213879" Oct 07 15:13:19 crc kubenswrapper[4672]: I1007 15:13:19.800747 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skvdm"] Oct 07 15:13:19 crc kubenswrapper[4672]: I1007 15:13:19.800964 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-skvdm" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="registry-server" containerID="cri-o://98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c" gracePeriod=2 Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.268564 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.450114 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content\") pod \"c744931b-5638-4b92-b1d5-2f96e59133f9\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.450394 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities\") pod \"c744931b-5638-4b92-b1d5-2f96e59133f9\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.450518 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mc7lx\" (UniqueName: \"kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx\") pod \"c744931b-5638-4b92-b1d5-2f96e59133f9\" (UID: \"c744931b-5638-4b92-b1d5-2f96e59133f9\") " Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.450977 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities" (OuterVolumeSpecName: "utilities") pod "c744931b-5638-4b92-b1d5-2f96e59133f9" (UID: "c744931b-5638-4b92-b1d5-2f96e59133f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.457144 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx" (OuterVolumeSpecName: "kube-api-access-mc7lx") pod "c744931b-5638-4b92-b1d5-2f96e59133f9" (UID: "c744931b-5638-4b92-b1d5-2f96e59133f9"). InnerVolumeSpecName "kube-api-access-mc7lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.491699 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c744931b-5638-4b92-b1d5-2f96e59133f9" (UID: "c744931b-5638-4b92-b1d5-2f96e59133f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.553479 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.553529 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mc7lx\" (UniqueName: \"kubernetes.io/projected/c744931b-5638-4b92-b1d5-2f96e59133f9-kube-api-access-mc7lx\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.553546 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c744931b-5638-4b92-b1d5-2f96e59133f9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.814351 4672 generic.go:334] "Generic (PLEG): container finished" podID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerID="98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c" exitCode=0 Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.814522 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skvdm" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.814559 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerDied","Data":"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c"} Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.815078 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skvdm" event={"ID":"c744931b-5638-4b92-b1d5-2f96e59133f9","Type":"ContainerDied","Data":"0e0c3d40125e30d2e963bb5e312f452c65a5291766e64590e81508a652765b0f"} Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.815114 4672 scope.go:117] "RemoveContainer" containerID="98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.851143 4672 scope.go:117] "RemoveContainer" containerID="8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.860180 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skvdm"] Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.872256 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-skvdm"] Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.874524 4672 scope.go:117] "RemoveContainer" containerID="a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.923554 4672 scope.go:117] "RemoveContainer" containerID="98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c" Oct 07 15:13:20 crc kubenswrapper[4672]: E1007 15:13:20.924198 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c\": container with ID starting with 98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c not found: ID does not exist" containerID="98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.924244 
4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c"} err="failed to get container status \"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c\": rpc error: code = NotFound desc = could not find container \"98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c\": container with ID starting with 98e8c924cd3e2df23a90be8c5ad2e74734945a612a045dc83f3999c44ddf090c not found: ID does not exist" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.924274 4672 scope.go:117] "RemoveContainer" containerID="8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353" Oct 07 15:13:20 crc kubenswrapper[4672]: E1007 15:13:20.924784 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353\": container with ID starting with 8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353 not found: ID does not exist" containerID="8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.924814 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353"} err="failed to get container status \"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353\": rpc error: code = NotFound desc = could not find container \"8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353\": container with ID starting with 8bd3b2512f554c5a7904d5364cdfd9a6c7d3fe6649d82d69b75f6c308aa89353 not found: ID does not exist" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.924836 4672 scope.go:117] "RemoveContainer" containerID="a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8" Oct 07 15:13:20 crc kubenswrapper[4672]: E1007 15:13:20.925380 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8\": container with ID starting with a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8 not found: ID does not exist" containerID="a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8" Oct 07 15:13:20 crc kubenswrapper[4672]: I1007 15:13:20.925403 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8"} err="failed to get container status \"a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8\": rpc error: code = NotFound desc = could not find container \"a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8\": container with ID starting with a8c643a4ddcffc6d12f98b9d6d93167a9c546578b4393f975ab159230579e8e8 not found: ID does not exist" Oct 07 15:13:21 crc kubenswrapper[4672]: I1007 15:13:21.903700 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" path="/var/lib/kubelet/pods/c744931b-5638-4b92-b1d5-2f96e59133f9/volumes" Oct 07 15:13:23 crc kubenswrapper[4672]: I1007 15:13:23.052894 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:13:23 crc kubenswrapper[4672]: I1007 15:13:23.103935 4672 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:13:23 crc kubenswrapper[4672]: I1007 15:13:23.796100 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:13:24 crc kubenswrapper[4672]: I1007 15:13:24.727852 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dqb96" Oct 07 15:13:24 crc kubenswrapper[4672]: I1007 15:13:24.728231 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dqb96" Oct 07 15:13:24 crc kubenswrapper[4672]: I1007 15:13:24.790580 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dqb96" Oct 07 15:13:24 crc kubenswrapper[4672]: I1007 15:13:24.862869 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-crw4m" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" containerID="cri-o://21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be" gracePeriod=2 Oct 07 15:13:24 crc kubenswrapper[4672]: I1007 15:13:24.910372 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dqb96" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.307281 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.443644 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content\") pod \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.443746 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w9lb\" (UniqueName: \"kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb\") pod \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.443945 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities\") pod \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\" (UID: \"dfe31691-5af9-4d45-a35c-58f41e7dbb60\") " Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.444807 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities" (OuterVolumeSpecName: "utilities") pod "dfe31691-5af9-4d45-a35c-58f41e7dbb60" (UID: "dfe31691-5af9-4d45-a35c-58f41e7dbb60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.449233 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb" (OuterVolumeSpecName: "kube-api-access-9w9lb") pod "dfe31691-5af9-4d45-a35c-58f41e7dbb60" (UID: "dfe31691-5af9-4d45-a35c-58f41e7dbb60"). InnerVolumeSpecName "kube-api-access-9w9lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.523722 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfe31691-5af9-4d45-a35c-58f41e7dbb60" (UID: "dfe31691-5af9-4d45-a35c-58f41e7dbb60"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.546730 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.546779 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w9lb\" (UniqueName: \"kubernetes.io/projected/dfe31691-5af9-4d45-a35c-58f41e7dbb60-kube-api-access-9w9lb\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.546799 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe31691-5af9-4d45-a35c-58f41e7dbb60-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.873131 4672 generic.go:334] "Generic (PLEG): container finished" podID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerID="21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be" exitCode=0 Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.873177 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-crw4m" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.873215 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerDied","Data":"21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be"} Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.873237 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crw4m" event={"ID":"dfe31691-5af9-4d45-a35c-58f41e7dbb60","Type":"ContainerDied","Data":"7834500cef88559d39b4a912f6717cea43fe10fb4611a049bad67cd06a1e4e67"} Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.873252 4672 scope.go:117] "RemoveContainer" containerID="21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.902322 4672 scope.go:117] "RemoveContainer" containerID="b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.910627 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.916655 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-crw4m"] Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.933152 4672 scope.go:117] "RemoveContainer" containerID="f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9" Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.959207 4672 scope.go:117] "RemoveContainer" containerID="21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be" Oct 07 15:13:25 crc kubenswrapper[4672]: E1007 15:13:25.960585 4672 log.go:32] "ContainerStatus 
Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.960670 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be"} err="failed to get container status \"21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be\": rpc error: code = NotFound desc = could not find container \"21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be\": container with ID starting with 21545153a9dad07c1e68cfce776b5ef557fc4dbc0b1596e68cd4edcbf324b4be not found: ID does not exist"
Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.960701 4672 scope.go:117] "RemoveContainer" containerID="b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8"
Oct 07 15:13:25 crc kubenswrapper[4672]: E1007 15:13:25.961146 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8\": container with ID starting with b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8 not found: ID does not exist" containerID="b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8"
Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.961192 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8"} err="failed to get container status \"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8\": rpc error: code = NotFound desc = could not find container \"b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8\": container with ID starting with b8dfaf99048a6be928d49484e1e2a1329849d72b45329ed2a87ea613f2a6b1f8 not found: ID does not exist"
Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.961219 4672 scope.go:117] "RemoveContainer" containerID="f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9"
Oct 07 15:13:25 crc kubenswrapper[4672]: E1007 15:13:25.961502 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9\": container with ID starting with f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9 not found: ID does not exist" containerID="f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9"
Oct 07 15:13:25 crc kubenswrapper[4672]: I1007 15:13:25.961539 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9"} err="failed to get container status \"f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9\": rpc error: code = NotFound desc = could not find container \"f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9\": container with ID starting with f5d85c40c82446c90297290f62cc3e32c1eb69fb3b72e8ab90f54ef076e0b2f9 not found: ID does not exist"
Oct 07 15:13:26 crc kubenswrapper[4672]: I1007 15:13:26.650061 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 15:13:26 crc kubenswrapper[4672]: I1007 15:13:26.650368 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.196820 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"]
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.197077 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dqb96" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="registry-server" containerID="cri-o://e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900" gracePeriod=2
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.611839 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqb96"
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.686327 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content\") pod \"f9c0198c-00ec-42af-992f-7d903bac3889\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") "
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.686436 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9crdv\" (UniqueName: \"kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv\") pod \"f9c0198c-00ec-42af-992f-7d903bac3889\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") "
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.686497 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities\") pod \"f9c0198c-00ec-42af-992f-7d903bac3889\" (UID: \"f9c0198c-00ec-42af-992f-7d903bac3889\") "
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.688656 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities" (OuterVolumeSpecName: "utilities") pod "f9c0198c-00ec-42af-992f-7d903bac3889" (UID: "f9c0198c-00ec-42af-992f-7d903bac3889"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.689156 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.697549 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv" (OuterVolumeSpecName: "kube-api-access-9crdv") pod "f9c0198c-00ec-42af-992f-7d903bac3889" (UID: "f9c0198c-00ec-42af-992f-7d903bac3889"). InnerVolumeSpecName "kube-api-access-9crdv". PluginName "kubernetes.io/projected", VolumeGidValue ""
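The patch_prober/prober pair above records a kubelet HTTP liveness probe: a GET to http://127.0.0.1:8798/health that failed with "connect: connection refused". Kubelet HTTP probes treat a response in the 2xx/3xx range as success and any transport error as failure; a small illustrative reproduction of that check (standard library only; the URL is taken from the log, the helper name and timeout are assumptions):

    import urllib.request
    import urllib.error

    def http_probe(url: str = "http://127.0.0.1:8798/health", timeout_s: float = 1.0) -> bool:
        # 2xx/3xx -> healthy; connection refused or timeout -> probe failure.
        try:
            with urllib.request.urlopen(url, timeout=timeout_s) as resp:
                return 200 <= resp.status < 400
        except (urllib.error.URLError, OSError):
            return False  # matches the "connect: connection refused" failure above

    print(http_probe())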
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.698683 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9c0198c-00ec-42af-992f-7d903bac3889" (UID: "f9c0198c-00ec-42af-992f-7d903bac3889"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.790697 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c0198c-00ec-42af-992f-7d903bac3889-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.790945 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9crdv\" (UniqueName: \"kubernetes.io/projected/f9c0198c-00ec-42af-992f-7d903bac3889-kube-api-access-9crdv\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.894861 4672 generic.go:334] "Generic (PLEG): container finished" podID="f9c0198c-00ec-42af-992f-7d903bac3889" containerID="e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900" exitCode=0 Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.894967 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqb96" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.903222 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" path="/var/lib/kubelet/pods/dfe31691-5af9-4d45-a35c-58f41e7dbb60/volumes" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.903950 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerDied","Data":"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900"} Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.903976 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqb96" event={"ID":"f9c0198c-00ec-42af-992f-7d903bac3889","Type":"ContainerDied","Data":"0e1b00e28de12b43c937bba21c9623a548952390f8de78156881810387320f8d"} Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.903993 4672 scope.go:117] "RemoveContainer" containerID="e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.927003 4672 scope.go:117] "RemoveContainer" containerID="3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b" Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.931213 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"] Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.939483 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqb96"] Oct 07 15:13:27 crc kubenswrapper[4672]: I1007 15:13:27.945091 4672 scope.go:117] "RemoveContainer" containerID="7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.005389 4672 scope.go:117] "RemoveContainer" containerID="e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900" Oct 07 15:13:28 crc kubenswrapper[4672]: E1007 15:13:28.006093 4672 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900\": container with ID starting with e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900 not found: ID does not exist" containerID="e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.006139 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900"} err="failed to get container status \"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900\": rpc error: code = NotFound desc = could not find container \"e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900\": container with ID starting with e664fa844623fef354f0720310512468c96cd77b3f7e2f32161bfb0ce5830900 not found: ID does not exist" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.006168 4672 scope.go:117] "RemoveContainer" containerID="3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b" Oct 07 15:13:28 crc kubenswrapper[4672]: E1007 15:13:28.006533 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b\": container with ID starting with 3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b not found: ID does not exist" containerID="3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.006562 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b"} err="failed to get container status \"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b\": rpc error: code = NotFound desc = could not find container \"3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b\": container with ID starting with 3033a9a0c7bfa20b03c5ec0f3bd7f2c0748f4f89d2b29ae5fbd2fe971b4ad48b not found: ID does not exist" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.006595 4672 scope.go:117] "RemoveContainer" containerID="7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a" Oct 07 15:13:28 crc kubenswrapper[4672]: E1007 15:13:28.006921 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a\": container with ID starting with 7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a not found: ID does not exist" containerID="7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a" Oct 07 15:13:28 crc kubenswrapper[4672]: I1007 15:13:28.006982 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a"} err="failed to get container status \"7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a\": rpc error: code = NotFound desc = could not find container \"7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a\": container with ID starting with 7afd66d165bb8ff4cbcf0618fc1fd9f7eef2fa70e42cfff4568ac8d8109a907a not found: ID does not exist" Oct 07 15:13:29 crc kubenswrapper[4672]: I1007 15:13:29.901333 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod 
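A pattern repeats in the teardown sequences above: "RemoveContainer" deletes a container, a follow-up ContainerStatus call fails with NotFound, a "DeleteContainer returned error" line is logged, and cleanup still proceeds. The runtime has already removed the container, so NotFound effectively means there is nothing left to do. A hedged sketch of that idempotent-delete pattern (all names here are hypothetical illustrations, not the kubelet's actual types):

    class NotFoundError(Exception):
        """Stand-in for the CRI NotFound status seen in the log above."""

    def remove_container(runtime, container_id: str) -> None:
        # Deleting an already-deleted container is treated as success:
        # a NotFound from the runtime means the work is done, so it is
        # logged and swallowed rather than retried or surfaced as a failure.
        try:
            runtime.remove(container_id)
        except NotFoundError:
            print(f"container {container_id[:12]} already gone; nothing to delete")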
volumes dir" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" path="/var/lib/kubelet/pods/f9c0198c-00ec-42af-992f-7d903bac3889/volumes" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.503646 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504653 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504669 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504678 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504686 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504699 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504708 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504716 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504724 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504734 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504742 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504754 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504761 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="extract-utilities" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504778 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504786 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504795 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504802 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: E1007 15:13:36.504810 
4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.504817 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="extract-content" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.505218 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c0198c-00ec-42af-992f-7d903bac3889" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.505265 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe31691-5af9-4d45-a35c-58f41e7dbb60" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.505279 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c744931b-5638-4b92-b1d5-2f96e59133f9" containerName="registry-server" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.507088 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.517166 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.648049 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.648401 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx7qk\" (UniqueName: \"kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.649043 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.750646 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.750753 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.750795 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx7qk\" (UniqueName: 
\"kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.751124 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.751378 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.780746 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx7qk\" (UniqueName: \"kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk\") pod \"certified-operators-hphjc\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:36 crc kubenswrapper[4672]: I1007 15:13:36.829805 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:37 crc kubenswrapper[4672]: I1007 15:13:37.468828 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:38 crc kubenswrapper[4672]: I1007 15:13:38.018695 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa81d067-213a-4ed6-823e-2447d93ea622" containerID="4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a" exitCode=0 Oct 07 15:13:38 crc kubenswrapper[4672]: I1007 15:13:38.018742 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerDied","Data":"4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a"} Oct 07 15:13:38 crc kubenswrapper[4672]: I1007 15:13:38.019096 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerStarted","Data":"12e9c60acbc607328c25d2c046d30845fcd3b2d83d4f92a9dd1c9deee652a9d0"} Oct 07 15:13:40 crc kubenswrapper[4672]: I1007 15:13:40.039107 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa81d067-213a-4ed6-823e-2447d93ea622" containerID="ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e" exitCode=0 Oct 07 15:13:40 crc kubenswrapper[4672]: I1007 15:13:40.039304 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerDied","Data":"ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e"} Oct 07 15:13:41 crc kubenswrapper[4672]: I1007 15:13:41.054777 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" 
event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerStarted","Data":"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41"} Oct 07 15:13:41 crc kubenswrapper[4672]: I1007 15:13:41.080292 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hphjc" podStartSLOduration=2.644347785 podStartE2EDuration="5.080265708s" podCreationTimestamp="2025-10-07 15:13:36 +0000 UTC" firstStartedPulling="2025-10-07 15:13:38.020468358 +0000 UTC m=+1494.995646939" lastFinishedPulling="2025-10-07 15:13:40.456386281 +0000 UTC m=+1497.431564862" observedRunningTime="2025-10-07 15:13:41.075527069 +0000 UTC m=+1498.050705680" watchObservedRunningTime="2025-10-07 15:13:41.080265708 +0000 UTC m=+1498.055444289" Oct 07 15:13:46 crc kubenswrapper[4672]: I1007 15:13:46.830345 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:46 crc kubenswrapper[4672]: I1007 15:13:46.830830 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:46 crc kubenswrapper[4672]: I1007 15:13:46.873956 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:47 crc kubenswrapper[4672]: I1007 15:13:47.156430 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:47 crc kubenswrapper[4672]: I1007 15:13:47.203042 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.125740 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hphjc" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="registry-server" containerID="cri-o://7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41" gracePeriod=2 Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.552999 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.680233 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities\") pod \"fa81d067-213a-4ed6-823e-2447d93ea622\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.680290 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content\") pod \"fa81d067-213a-4ed6-823e-2447d93ea622\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.680326 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx7qk\" (UniqueName: \"kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk\") pod \"fa81d067-213a-4ed6-823e-2447d93ea622\" (UID: \"fa81d067-213a-4ed6-823e-2447d93ea622\") " Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.681784 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities" (OuterVolumeSpecName: "utilities") pod "fa81d067-213a-4ed6-823e-2447d93ea622" (UID: "fa81d067-213a-4ed6-823e-2447d93ea622"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.686279 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk" (OuterVolumeSpecName: "kube-api-access-cx7qk") pod "fa81d067-213a-4ed6-823e-2447d93ea622" (UID: "fa81d067-213a-4ed6-823e-2447d93ea622"). InnerVolumeSpecName "kube-api-access-cx7qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.782716 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:49 crc kubenswrapper[4672]: I1007 15:13:49.782758 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx7qk\" (UniqueName: \"kubernetes.io/projected/fa81d067-213a-4ed6-823e-2447d93ea622-kube-api-access-cx7qk\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.120762 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa81d067-213a-4ed6-823e-2447d93ea622" (UID: "fa81d067-213a-4ed6-823e-2447d93ea622"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.135834 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa81d067-213a-4ed6-823e-2447d93ea622" containerID="7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41" exitCode=0 Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.135894 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerDied","Data":"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41"} Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.135916 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hphjc" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.135938 4672 scope.go:117] "RemoveContainer" containerID="7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.135926 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hphjc" event={"ID":"fa81d067-213a-4ed6-823e-2447d93ea622","Type":"ContainerDied","Data":"12e9c60acbc607328c25d2c046d30845fcd3b2d83d4f92a9dd1c9deee652a9d0"} Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.154277 4672 scope.go:117] "RemoveContainer" containerID="ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.173320 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.178344 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hphjc"] Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.190440 4672 scope.go:117] "RemoveContainer" containerID="4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.190969 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa81d067-213a-4ed6-823e-2447d93ea622-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.217791 4672 scope.go:117] "RemoveContainer" containerID="7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41" Oct 07 15:13:50 crc kubenswrapper[4672]: E1007 15:13:50.218275 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41\": container with ID starting with 7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41 not found: ID does not exist" containerID="7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.218329 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41"} err="failed to get container status \"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41\": rpc error: code = NotFound desc = could not find container \"7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41\": container with ID starting with 7c9b2bbf7dbfabd35c433fd5e33e7e445576e1a93af1bbdbd2e5c96306f8df41 not found: ID does not exist" Oct 07 
15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.218366 4672 scope.go:117] "RemoveContainer" containerID="ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e" Oct 07 15:13:50 crc kubenswrapper[4672]: E1007 15:13:50.219404 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e\": container with ID starting with ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e not found: ID does not exist" containerID="ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.219452 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e"} err="failed to get container status \"ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e\": rpc error: code = NotFound desc = could not find container \"ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e\": container with ID starting with ba4f49a0f7081e2b3b8f6f99486b04eece8d3911e6e059b9a3f95f74cfcab46e not found: ID does not exist" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.219480 4672 scope.go:117] "RemoveContainer" containerID="4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a" Oct 07 15:13:50 crc kubenswrapper[4672]: E1007 15:13:50.219903 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a\": container with ID starting with 4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a not found: ID does not exist" containerID="4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a" Oct 07 15:13:50 crc kubenswrapper[4672]: I1007 15:13:50.219973 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a"} err="failed to get container status \"4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a\": rpc error: code = NotFound desc = could not find container \"4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a\": container with ID starting with 4e608a6b786792778d19ab45588bcb2b8298721619be80fa6b4cb0ea937d980a not found: ID does not exist" Oct 07 15:13:51 crc kubenswrapper[4672]: I1007 15:13:51.905369 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" path="/var/lib/kubelet/pods/fa81d067-213a-4ed6-823e-2447d93ea622/volumes" Oct 07 15:13:52 crc kubenswrapper[4672]: I1007 15:13:52.629730 4672 scope.go:117] "RemoveContainer" containerID="7b52ee40d1b537c62ddfe2baf9d73827602317856984abd144d04875a6ab0911" Oct 07 15:13:56 crc kubenswrapper[4672]: I1007 15:13:56.650415 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:13:56 crc kubenswrapper[4672]: I1007 15:13:56.650889 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:14:26 crc kubenswrapper[4672]: I1007 15:14:26.650109 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:14:26 crc kubenswrapper[4672]: I1007 15:14:26.650625 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:14:26 crc kubenswrapper[4672]: I1007 15:14:26.650672 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:14:26 crc kubenswrapper[4672]: I1007 15:14:26.651423 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:14:26 crc kubenswrapper[4672]: I1007 15:14:26.651500 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" gracePeriod=600 Oct 07 15:14:26 crc kubenswrapper[4672]: E1007 15:14:26.771797 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:14:27 crc kubenswrapper[4672]: I1007 15:14:27.464275 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" exitCode=0 Oct 07 15:14:27 crc kubenswrapper[4672]: I1007 15:14:27.464334 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429"} Oct 07 15:14:27 crc kubenswrapper[4672]: I1007 15:14:27.464609 4672 scope.go:117] "RemoveContainer" containerID="c72fdf37ef4e6a98125f2c906a887f33d0d50801813254b3dbe632e126d1639a" Oct 07 15:14:27 crc kubenswrapper[4672]: I1007 15:14:27.465177 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:14:27 crc kubenswrapper[4672]: E1007 15:14:27.465530 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:14:41 crc kubenswrapper[4672]: I1007 15:14:41.892424 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:14:41 crc kubenswrapper[4672]: E1007 15:14:41.893415 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:14:52 crc kubenswrapper[4672]: I1007 15:14:52.773223 4672 scope.go:117] "RemoveContainer" containerID="58690f3b57da320b8f4efe3215a4333c170fd1ebefa254e9a59e3db1090842ad" Oct 07 15:14:56 crc kubenswrapper[4672]: I1007 15:14:56.891605 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:14:56 crc kubenswrapper[4672]: E1007 15:14:56.892071 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.146170 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj"] Oct 07 15:15:00 crc kubenswrapper[4672]: E1007 15:15:00.146992 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="extract-utilities" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.147034 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="extract-utilities" Oct 07 15:15:00 crc kubenswrapper[4672]: E1007 15:15:00.147050 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="extract-content" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.147057 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="extract-content" Oct 07 15:15:00 crc kubenswrapper[4672]: E1007 15:15:00.147073 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="registry-server" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.147079 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="registry-server" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.147414 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa81d067-213a-4ed6-823e-2447d93ea622" containerName="registry-server" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.148244 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.150695 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.151164 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.199154 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj"] Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.310685 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vmw4\" (UniqueName: \"kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.310871 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.310900 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.412452 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vmw4\" (UniqueName: \"kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.412539 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.412558 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.413721 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume\") pod 
\"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.418190 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.431303 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vmw4\" (UniqueName: \"kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4\") pod \"collect-profiles-29330835-wj2pj\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.512768 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:00 crc kubenswrapper[4672]: I1007 15:15:00.939267 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj"] Oct 07 15:15:01 crc kubenswrapper[4672]: I1007 15:15:01.738730 4672 generic.go:334] "Generic (PLEG): container finished" podID="f018986c-bf8a-43ac-b89b-dfa0d276b31f" containerID="e58cb2445728215f52ba900dcdfa41019de6abfd24e0889aa31e3d0d7d2c879e" exitCode=0 Oct 07 15:15:01 crc kubenswrapper[4672]: I1007 15:15:01.738792 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" event={"ID":"f018986c-bf8a-43ac-b89b-dfa0d276b31f","Type":"ContainerDied","Data":"e58cb2445728215f52ba900dcdfa41019de6abfd24e0889aa31e3d0d7d2c879e"} Oct 07 15:15:01 crc kubenswrapper[4672]: I1007 15:15:01.739116 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" event={"ID":"f018986c-bf8a-43ac-b89b-dfa0d276b31f","Type":"ContainerStarted","Data":"e1cdb94a7ba63fc173cf9ea3eeeeb34dbe1590edb7bed1a6fc5d10f8b534d7e1"} Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.059200 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.161457 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vmw4\" (UniqueName: \"kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4\") pod \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.161830 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume\") pod \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.161983 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume\") pod \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\" (UID: \"f018986c-bf8a-43ac-b89b-dfa0d276b31f\") " Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.162593 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume" (OuterVolumeSpecName: "config-volume") pod "f018986c-bf8a-43ac-b89b-dfa0d276b31f" (UID: "f018986c-bf8a-43ac-b89b-dfa0d276b31f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.167516 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4" (OuterVolumeSpecName: "kube-api-access-2vmw4") pod "f018986c-bf8a-43ac-b89b-dfa0d276b31f" (UID: "f018986c-bf8a-43ac-b89b-dfa0d276b31f"). InnerVolumeSpecName "kube-api-access-2vmw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.167707 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f018986c-bf8a-43ac-b89b-dfa0d276b31f" (UID: "f018986c-bf8a-43ac-b89b-dfa0d276b31f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.264149 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f018986c-bf8a-43ac-b89b-dfa0d276b31f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.264183 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vmw4\" (UniqueName: \"kubernetes.io/projected/f018986c-bf8a-43ac-b89b-dfa0d276b31f-kube-api-access-2vmw4\") on node \"crc\" DevicePath \"\"" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.264197 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f018986c-bf8a-43ac-b89b-dfa0d276b31f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.757331 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" event={"ID":"f018986c-bf8a-43ac-b89b-dfa0d276b31f","Type":"ContainerDied","Data":"e1cdb94a7ba63fc173cf9ea3eeeeb34dbe1590edb7bed1a6fc5d10f8b534d7e1"} Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.757407 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1cdb94a7ba63fc173cf9ea3eeeeb34dbe1590edb7bed1a6fc5d10f8b534d7e1" Oct 07 15:15:03 crc kubenswrapper[4672]: I1007 15:15:03.757417 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj" Oct 07 15:15:11 crc kubenswrapper[4672]: I1007 15:15:11.892665 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:15:11 crc kubenswrapper[4672]: E1007 15:15:11.893472 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:15:26 crc kubenswrapper[4672]: I1007 15:15:26.891302 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:15:26 crc kubenswrapper[4672]: E1007 15:15:26.891996 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:15:38 crc kubenswrapper[4672]: I1007 15:15:38.891936 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:15:38 crc kubenswrapper[4672]: E1007 15:15:38.892734 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.845580 4672 scope.go:117] "RemoveContainer" containerID="5407b9a6b501a2e247816e613344936f8e45cd5c30624c082e3af5902bf756b8" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.876392 4672 scope.go:117] "RemoveContainer" containerID="eba5ca58caf26d0b6293aa0888dd7ca75f6f8af626653ac40617f92a9fca5862" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.892976 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:15:52 crc kubenswrapper[4672]: E1007 15:15:52.893292 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.896935 4672 scope.go:117] "RemoveContainer" containerID="108a4dc4754471257133a71c713a8c0637673e0012918dd776e0219715559e3b" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.928229 4672 scope.go:117] "RemoveContainer" containerID="6ec4b4e568f4fb0d2c37640c1aeb7fb978a936312f2e72c53577af7cf8bb39a6" Oct 07 15:15:52 crc kubenswrapper[4672]: I1007 15:15:52.976040 4672 scope.go:117] "RemoveContainer" containerID="33b8d2cf3e62fb61f9dd03cdb1112c5db182a1e9d794889e3694cf9751f13d5e" Oct 07 15:15:53 crc kubenswrapper[4672]: I1007 15:15:53.001345 4672 scope.go:117] "RemoveContainer" containerID="9771fb9a1645c29b223683ad803761922b56903de431739534be10627b8efb41" Oct 07 15:16:06 crc kubenswrapper[4672]: I1007 15:16:06.892263 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:16:06 crc kubenswrapper[4672]: E1007 15:16:06.892969 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:16:16 crc kubenswrapper[4672]: I1007 15:16:16.366284 4672 generic.go:334] "Generic (PLEG): container finished" podID="4a1d79b4-a176-48af-9c78-59c7ddd39b71" containerID="6c489d452e0126b7666f7a353beee16d307cde07a04b92d59b33212b1e102a15" exitCode=0 Oct 07 15:16:16 crc kubenswrapper[4672]: I1007 15:16:16.366387 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" event={"ID":"4a1d79b4-a176-48af-9c78-59c7ddd39b71","Type":"ContainerDied","Data":"6c489d452e0126b7666f7a353beee16d307cde07a04b92d59b33212b1e102a15"} Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.763570 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.856468 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key\") pod \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.856615 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle\") pod \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.856723 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") pod \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.856791 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhglx\" (UniqueName: \"kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx\") pod \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.862343 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4a1d79b4-a176-48af-9c78-59c7ddd39b71" (UID: "4a1d79b4-a176-48af-9c78-59c7ddd39b71"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.863190 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx" (OuterVolumeSpecName: "kube-api-access-fhglx") pod "4a1d79b4-a176-48af-9c78-59c7ddd39b71" (UID: "4a1d79b4-a176-48af-9c78-59c7ddd39b71"). InnerVolumeSpecName "kube-api-access-fhglx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:16:17 crc kubenswrapper[4672]: E1007 15:16:17.882638 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory podName:4a1d79b4-a176-48af-9c78-59c7ddd39b71 nodeName:}" failed. No retries permitted until 2025-10-07 15:16:18.382607629 +0000 UTC m=+1655.357786210 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory") pod "4a1d79b4-a176-48af-9c78-59c7ddd39b71" (UID: "4a1d79b4-a176-48af-9c78-59c7ddd39b71") : error deleting /var/lib/kubelet/pods/4a1d79b4-a176-48af-9c78-59c7ddd39b71/volume-subpaths: remove /var/lib/kubelet/pods/4a1d79b4-a176-48af-9c78-59c7ddd39b71/volume-subpaths: no such file or directory Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.885139 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4a1d79b4-a176-48af-9c78-59c7ddd39b71" (UID: "4a1d79b4-a176-48af-9c78-59c7ddd39b71"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.959763 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.959798 4672 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:16:17 crc kubenswrapper[4672]: I1007 15:16:17.959812 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhglx\" (UniqueName: \"kubernetes.io/projected/4a1d79b4-a176-48af-9c78-59c7ddd39b71-kube-api-access-fhglx\") on node \"crc\" DevicePath \"\"" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.389770 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" event={"ID":"4a1d79b4-a176-48af-9c78-59c7ddd39b71","Type":"ContainerDied","Data":"d392a7801b41de9fecc2b9fc50a11e3df33bfcf0806b49529aedff7e16a968fb"} Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.389813 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d392a7801b41de9fecc2b9fc50a11e3df33bfcf0806b49529aedff7e16a968fb" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.389830 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.470281 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") pod \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\" (UID: \"4a1d79b4-a176-48af-9c78-59c7ddd39b71\") " Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.471838 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn"] Oct 07 15:16:18 crc kubenswrapper[4672]: E1007 15:16:18.472304 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d79b4-a176-48af-9c78-59c7ddd39b71" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.472328 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d79b4-a176-48af-9c78-59c7ddd39b71" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 15:16:18 crc kubenswrapper[4672]: E1007 15:16:18.472359 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f018986c-bf8a-43ac-b89b-dfa0d276b31f" containerName="collect-profiles" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.472367 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f018986c-bf8a-43ac-b89b-dfa0d276b31f" containerName="collect-profiles" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.472592 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f018986c-bf8a-43ac-b89b-dfa0d276b31f" containerName="collect-profiles" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.472617 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a1d79b4-a176-48af-9c78-59c7ddd39b71" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.473608 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.481957 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory" (OuterVolumeSpecName: "inventory") pod "4a1d79b4-a176-48af-9c78-59c7ddd39b71" (UID: "4a1d79b4-a176-48af-9c78-59c7ddd39b71"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.483850 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn"] Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.572730 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.572854 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.572947 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-787jj\" (UniqueName: \"kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.572999 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a1d79b4-a176-48af-9c78-59c7ddd39b71-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.674433 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.674577 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.674722 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-787jj\" (UniqueName: \"kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.678845 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.680052 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.694840 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-787jj\" (UniqueName: \"kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-5txmn\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.830306 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:16:18 crc kubenswrapper[4672]: I1007 15:16:18.892266 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:16:18 crc kubenswrapper[4672]: E1007 15:16:18.892543 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:16:19 crc kubenswrapper[4672]: I1007 15:16:19.340536 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn"] Oct 07 15:16:19 crc kubenswrapper[4672]: I1007 15:16:19.397638 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" event={"ID":"fa845c6c-c027-483f-b5e8-404778f6a1d4","Type":"ContainerStarted","Data":"31498ceefc7b6810bb6e411750b082c76ac617d431a04b52168eacc5fa0a40eb"} Oct 07 15:16:21 crc kubenswrapper[4672]: I1007 15:16:21.416083 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" event={"ID":"fa845c6c-c027-483f-b5e8-404778f6a1d4","Type":"ContainerStarted","Data":"79baf198a6e6e94f620604601694e9fc83c216f04c701cf312bb5ba3952c9993"} Oct 07 15:16:21 crc kubenswrapper[4672]: I1007 15:16:21.436730 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" podStartSLOduration=1.9725689389999999 podStartE2EDuration="3.436693252s" podCreationTimestamp="2025-10-07 15:16:18 +0000 UTC" firstStartedPulling="2025-10-07 15:16:19.347328111 +0000 UTC m=+1656.322506682" lastFinishedPulling="2025-10-07 15:16:20.811452404 +0000 UTC m=+1657.786630995" observedRunningTime="2025-10-07 15:16:21.433282882 +0000 UTC m=+1658.408461483" watchObservedRunningTime="2025-10-07 15:16:21.436693252 +0000 UTC m=+1658.411871833" Oct 07 15:16:29 crc kubenswrapper[4672]: I1007 15:16:29.892765 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 
07 15:16:29 crc kubenswrapper[4672]: E1007 15:16:29.893687 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:16:41 crc kubenswrapper[4672]: I1007 15:16:41.892392 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:16:41 crc kubenswrapper[4672]: E1007 15:16:41.893677 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:16:53 crc kubenswrapper[4672]: I1007 15:16:53.068346 4672 scope.go:117] "RemoveContainer" containerID="b215712ffb49a2e42389528ae2cec1bc44d6d54f52b3488704fc7923f5abcc40" Oct 07 15:16:53 crc kubenswrapper[4672]: I1007 15:16:53.088546 4672 scope.go:117] "RemoveContainer" containerID="0b334c6caed1540482c291e6dd0bc5df3337d34dfcc3236b231b013e89e690a3" Oct 07 15:16:53 crc kubenswrapper[4672]: I1007 15:16:53.107376 4672 scope.go:117] "RemoveContainer" containerID="7bd0d6db242e9284384b73e28e4c0ad4c6cce8a899d6e7581a708d368cc5cbdc" Oct 07 15:16:56 crc kubenswrapper[4672]: I1007 15:16:56.891701 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:16:56 crc kubenswrapper[4672]: E1007 15:16:56.893129 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:17:10 crc kubenswrapper[4672]: I1007 15:17:10.036836 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nddgp"] Oct 07 15:17:10 crc kubenswrapper[4672]: I1007 15:17:10.046572 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nddgp"] Oct 07 15:17:10 crc kubenswrapper[4672]: I1007 15:17:10.892095 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:17:10 crc kubenswrapper[4672]: E1007 15:17:10.892605 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:17:11 crc kubenswrapper[4672]: I1007 15:17:11.902725 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47f8f609-ee6c-49c6-a617-9f258638926b" 
path="/var/lib/kubelet/pods/47f8f609-ee6c-49c6-a617-9f258638926b/volumes" Oct 07 15:17:13 crc kubenswrapper[4672]: I1007 15:17:13.037096 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-8sz2j"] Oct 07 15:17:13 crc kubenswrapper[4672]: I1007 15:17:13.055065 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-8sz2j"] Oct 07 15:17:13 crc kubenswrapper[4672]: I1007 15:17:13.902516 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5c70fe0-f6a3-4d99-b481-e252f5f4900f" path="/var/lib/kubelet/pods/f5c70fe0-f6a3-4d99-b481-e252f5f4900f/volumes" Oct 07 15:17:14 crc kubenswrapper[4672]: I1007 15:17:14.030824 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-h82gn"] Oct 07 15:17:14 crc kubenswrapper[4672]: I1007 15:17:14.041674 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-h82gn"] Oct 07 15:17:15 crc kubenswrapper[4672]: I1007 15:17:15.904995 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac1a472-eb46-4de1-9833-3acb1d5ca8b9" path="/var/lib/kubelet/pods/5ac1a472-eb46-4de1-9833-3acb1d5ca8b9/volumes" Oct 07 15:17:19 crc kubenswrapper[4672]: I1007 15:17:19.025649 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-0c0d-account-create-b22tw"] Oct 07 15:17:19 crc kubenswrapper[4672]: I1007 15:17:19.033761 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-0c0d-account-create-b22tw"] Oct 07 15:17:19 crc kubenswrapper[4672]: I1007 15:17:19.901965 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c16c5b8c-8e95-470e-8864-09d7c6af3d07" path="/var/lib/kubelet/pods/c16c5b8c-8e95-470e-8864-09d7c6af3d07/volumes" Oct 07 15:17:24 crc kubenswrapper[4672]: I1007 15:17:24.039620 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-c94d-account-create-lr26x"] Oct 07 15:17:24 crc kubenswrapper[4672]: I1007 15:17:24.048363 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e9f3-account-create-xrvl5"] Oct 07 15:17:24 crc kubenswrapper[4672]: I1007 15:17:24.057303 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e9f3-account-create-xrvl5"] Oct 07 15:17:24 crc kubenswrapper[4672]: I1007 15:17:24.064201 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-c94d-account-create-lr26x"] Oct 07 15:17:24 crc kubenswrapper[4672]: I1007 15:17:24.892060 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:17:24 crc kubenswrapper[4672]: E1007 15:17:24.892805 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:17:25 crc kubenswrapper[4672]: I1007 15:17:25.902222 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7b5c37-6793-455f-a313-0e620aac3401" path="/var/lib/kubelet/pods/4f7b5c37-6793-455f-a313-0e620aac3401/volumes" Oct 07 15:17:25 crc kubenswrapper[4672]: I1007 15:17:25.902711 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6552b769-4ac3-4f44-a977-6b7ccafe7df6" path="/var/lib/kubelet/pods/6552b769-4ac3-4f44-a977-6b7ccafe7df6/volumes" Oct 07 15:17:36 crc kubenswrapper[4672]: I1007 15:17:36.892384 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:17:36 crc kubenswrapper[4672]: E1007 15:17:36.894430 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.038392 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-nvjxx"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.048087 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-8l95r"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.056298 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6fl7p"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.063238 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-nvjxx"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.070611 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6fl7p"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.077430 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-8l95r"] Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.906054 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="303b2f6b-3e7b-4f25-b480-e2a74863215a" path="/var/lib/kubelet/pods/303b2f6b-3e7b-4f25-b480-e2a74863215a/volumes" Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.906826 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4272a5a5-5cd2-4045-a1d7-735f5d8e3479" path="/var/lib/kubelet/pods/4272a5a5-5cd2-4045-a1d7-735f5d8e3479/volumes" Oct 07 15:17:45 crc kubenswrapper[4672]: I1007 15:17:45.907337 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d130321-61d8-41eb-972e-c0a9b7eed5e2" path="/var/lib/kubelet/pods/5d130321-61d8-41eb-972e-c0a9b7eed5e2/volumes" Oct 07 15:17:47 crc kubenswrapper[4672]: I1007 15:17:47.037141 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-4bcjh"] Oct 07 15:17:47 crc kubenswrapper[4672]: I1007 15:17:47.058608 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-4bcjh"] Oct 07 15:17:47 crc kubenswrapper[4672]: I1007 15:17:47.901728 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3158784-bab1-4e87-957c-cce0a7180f6f" path="/var/lib/kubelet/pods/c3158784-bab1-4e87-957c-cce0a7180f6f/volumes" Oct 07 15:17:48 crc kubenswrapper[4672]: I1007 15:17:48.892276 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:17:48 crc kubenswrapper[4672]: E1007 15:17:48.893153 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:17:52 crc kubenswrapper[4672]: I1007 15:17:52.024687 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qw6h4"] Oct 07 15:17:52 crc kubenswrapper[4672]: I1007 15:17:52.030917 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qw6h4"] Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.178608 4672 scope.go:117] "RemoveContainer" containerID="7d0553f00eefaa6c52e0d01829263676537f3e46f440ed86ba734093b2bb0c78" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.207928 4672 scope.go:117] "RemoveContainer" containerID="497b4799b70c212352a3a49ddafe22596bd73fcf2c6bb07fac6e9dc577dcdfda" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.256997 4672 scope.go:117] "RemoveContainer" containerID="cb3935663bce77023993755d95c01c007a5b46217b9dcbf39d52c4afca178b15" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.328849 4672 scope.go:117] "RemoveContainer" containerID="7423cb2a455bbf6eef147f2e0878691720cb299980732775460bfcee254b6b98" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.348888 4672 scope.go:117] "RemoveContainer" containerID="3e6b9268b83fa2d07c62ead3ad7808f8b1939e3e2a2460afe736e72e3c3cd463" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.399887 4672 scope.go:117] "RemoveContainer" containerID="62ffe44deb835c7dfd59342fc879925d7f6d54d26c2f2e755fb22a37d54bf09f" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.445092 4672 scope.go:117] "RemoveContainer" containerID="f8b60223341c2dfe46e0a38bed3d5f53a96c67ebbdbc8548a38de564f9d625aa" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.469696 4672 scope.go:117] "RemoveContainer" containerID="55a3b5e6469791eec39168304d3db18cadae65df906ec7bddc279c718b10a8af" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.490228 4672 scope.go:117] "RemoveContainer" containerID="8ff9979e84c7327d81511b9865c08b4f9a7cadf02c1ae8e913ee3b5a9e999912" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.510708 4672 scope.go:117] "RemoveContainer" containerID="64abc7d214a6cc5bb62915bec45e54fd620d629947970f50dcabd90db3ee7721" Oct 07 15:17:53 crc kubenswrapper[4672]: I1007 15:17:53.904589 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36d7f75-c0e2-4884-9556-e2578247b813" path="/var/lib/kubelet/pods/c36d7f75-c0e2-4884-9556-e2578247b813/volumes" Oct 07 15:18:03 crc kubenswrapper[4672]: I1007 15:18:03.364154 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa845c6c-c027-483f-b5e8-404778f6a1d4" containerID="79baf198a6e6e94f620604601694e9fc83c216f04c701cf312bb5ba3952c9993" exitCode=0 Oct 07 15:18:03 crc kubenswrapper[4672]: I1007 15:18:03.364235 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" event={"ID":"fa845c6c-c027-483f-b5e8-404778f6a1d4","Type":"ContainerDied","Data":"79baf198a6e6e94f620604601694e9fc83c216f04c701cf312bb5ba3952c9993"} Oct 07 15:18:03 crc kubenswrapper[4672]: I1007 15:18:03.899410 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:18:03 crc kubenswrapper[4672]: E1007 15:18:03.899731 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.763454 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.865590 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-787jj\" (UniqueName: \"kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj\") pod \"fa845c6c-c027-483f-b5e8-404778f6a1d4\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.865761 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key\") pod \"fa845c6c-c027-483f-b5e8-404778f6a1d4\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.865814 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory\") pod \"fa845c6c-c027-483f-b5e8-404778f6a1d4\" (UID: \"fa845c6c-c027-483f-b5e8-404778f6a1d4\") " Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.871261 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj" (OuterVolumeSpecName: "kube-api-access-787jj") pod "fa845c6c-c027-483f-b5e8-404778f6a1d4" (UID: "fa845c6c-c027-483f-b5e8-404778f6a1d4"). InnerVolumeSpecName "kube-api-access-787jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.893758 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa845c6c-c027-483f-b5e8-404778f6a1d4" (UID: "fa845c6c-c027-483f-b5e8-404778f6a1d4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.896374 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory" (OuterVolumeSpecName: "inventory") pod "fa845c6c-c027-483f-b5e8-404778f6a1d4" (UID: "fa845c6c-c027-483f-b5e8-404778f6a1d4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.967817 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-787jj\" (UniqueName: \"kubernetes.io/projected/fa845c6c-c027-483f-b5e8-404778f6a1d4-kube-api-access-787jj\") on node \"crc\" DevicePath \"\"" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.967847 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:18:04 crc kubenswrapper[4672]: I1007 15:18:04.967856 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa845c6c-c027-483f-b5e8-404778f6a1d4-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.381329 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" event={"ID":"fa845c6c-c027-483f-b5e8-404778f6a1d4","Type":"ContainerDied","Data":"31498ceefc7b6810bb6e411750b082c76ac617d431a04b52168eacc5fa0a40eb"} Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.381637 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31498ceefc7b6810bb6e411750b082c76ac617d431a04b52168eacc5fa0a40eb" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.381409 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-5txmn" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.462249 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z"] Oct 07 15:18:05 crc kubenswrapper[4672]: E1007 15:18:05.462677 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa845c6c-c027-483f-b5e8-404778f6a1d4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.462698 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa845c6c-c027-483f-b5e8-404778f6a1d4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.462859 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa845c6c-c027-483f-b5e8-404778f6a1d4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.463465 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.465590 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.465807 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.467641 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.473346 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z"] Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.477398 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.578963 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l25k\" (UniqueName: \"kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.579100 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.579151 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.680886 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.680976 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.681075 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l25k\" (UniqueName: \"kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.684904 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.695981 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.703121 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l25k\" (UniqueName: \"kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:05 crc kubenswrapper[4672]: I1007 15:18:05.816190 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:18:06 crc kubenswrapper[4672]: I1007 15:18:06.330215 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z"] Oct 07 15:18:06 crc kubenswrapper[4672]: I1007 15:18:06.333496 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:18:06 crc kubenswrapper[4672]: I1007 15:18:06.391129 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" event={"ID":"a9f7823a-974b-4ef1-9414-f1aac7bd2179","Type":"ContainerStarted","Data":"2e3a97736fc05e55f284af3cd4cd7ee78b0e9d2ee92f1370911153bdbd0ee0bc"} Oct 07 15:18:07 crc kubenswrapper[4672]: I1007 15:18:07.401070 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" event={"ID":"a9f7823a-974b-4ef1-9414-f1aac7bd2179","Type":"ContainerStarted","Data":"99e31954fb2000cc3ea64c022ccb416e784d880cab4a52cf7c6be0aec76ebab9"} Oct 07 15:18:07 crc kubenswrapper[4672]: I1007 15:18:07.421796 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" podStartSLOduration=1.974279278 podStartE2EDuration="2.421777287s" podCreationTimestamp="2025-10-07 15:18:05 +0000 UTC" firstStartedPulling="2025-10-07 15:18:06.333282941 +0000 UTC m=+1763.308461522" lastFinishedPulling="2025-10-07 15:18:06.78078095 +0000 UTC m=+1763.755959531" observedRunningTime="2025-10-07 15:18:07.4174029 +0000 UTC m=+1764.392581481" watchObservedRunningTime="2025-10-07 15:18:07.421777287 +0000 UTC m=+1764.396955868" Oct 07 15:18:13 crc kubenswrapper[4672]: I1007 15:18:13.035678 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-8124-account-create-z59nd"] Oct 07 15:18:13 crc kubenswrapper[4672]: I1007 15:18:13.046071 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8124-account-create-z59nd"] Oct 07 15:18:13 crc kubenswrapper[4672]: I1007 15:18:13.902644 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cffe4dc-e78b-4b0e-a3cc-093777856e62" path="/var/lib/kubelet/pods/8cffe4dc-e78b-4b0e-a3cc-093777856e62/volumes" Oct 07 15:18:14 crc kubenswrapper[4672]: I1007 15:18:14.030296 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-913d-account-create-62bjt"] Oct 07 15:18:14 crc kubenswrapper[4672]: I1007 15:18:14.042200 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5189-account-create-d5h2g"] Oct 07 15:18:14 crc kubenswrapper[4672]: I1007 15:18:14.052511 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5189-account-create-d5h2g"] Oct 07 15:18:14 crc kubenswrapper[4672]: I1007 15:18:14.059157 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-913d-account-create-62bjt"] Oct 07 15:18:14 crc kubenswrapper[4672]: I1007 15:18:14.891699 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:18:14 crc kubenswrapper[4672]: E1007 15:18:14.895794 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:18:15 crc kubenswrapper[4672]: I1007 15:18:15.907669 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bab44d9-a425-4213-ba3d-58d6a976da25" path="/var/lib/kubelet/pods/3bab44d9-a425-4213-ba3d-58d6a976da25/volumes" Oct 07 15:18:15 crc kubenswrapper[4672]: I1007 15:18:15.908694 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce3ab335-518f-4c6e-8651-894af34abdd7" path="/var/lib/kubelet/pods/ce3ab335-518f-4c6e-8651-894af34abdd7/volumes" Oct 07 15:18:18 crc kubenswrapper[4672]: I1007 15:18:18.029870 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lsj7z"] Oct 07 15:18:18 crc kubenswrapper[4672]: I1007 15:18:18.037721 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lsj7z"] Oct 07 15:18:19 crc kubenswrapper[4672]: I1007 15:18:19.904924 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b" path="/var/lib/kubelet/pods/e32e5c0f-b0e2-46fa-8672-08a4b6bc9e2b/volumes" Oct 07 15:18:26 crc kubenswrapper[4672]: I1007 15:18:26.891455 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:18:26 crc kubenswrapper[4672]: E1007 15:18:26.892316 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" 
podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:18:33 crc kubenswrapper[4672]: I1007 15:18:33.065427 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-lzmcv"] Oct 07 15:18:33 crc kubenswrapper[4672]: I1007 15:18:33.074824 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-lzmcv"] Oct 07 15:18:33 crc kubenswrapper[4672]: I1007 15:18:33.905160 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c3d2245-20b7-4ef1-a2ba-9548d4c5d017" path="/var/lib/kubelet/pods/8c3d2245-20b7-4ef1-a2ba-9548d4c5d017/volumes" Oct 07 15:18:37 crc kubenswrapper[4672]: I1007 15:18:37.892039 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:18:37 crc kubenswrapper[4672]: E1007 15:18:37.892594 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:18:45 crc kubenswrapper[4672]: I1007 15:18:45.026856 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-4ss8w"] Oct 07 15:18:45 crc kubenswrapper[4672]: I1007 15:18:45.035739 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-4ss8w"] Oct 07 15:18:45 crc kubenswrapper[4672]: I1007 15:18:45.901985 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1510beef-3f92-4462-90a7-e1b2a92df211" path="/var/lib/kubelet/pods/1510beef-3f92-4462-90a7-e1b2a92df211/volumes" Oct 07 15:18:50 crc kubenswrapper[4672]: I1007 15:18:50.901616 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:18:50 crc kubenswrapper[4672]: E1007 15:18:50.902405 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.699251 4672 scope.go:117] "RemoveContainer" containerID="75f2292253e63b298e3f1708d7a9faa1ac3efd45a491d9cbd4d1ea30b7a48b85" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.729719 4672 scope.go:117] "RemoveContainer" containerID="923183dbf95f906c6d05b13729d7d3332cb2a0a1035bd95fbf82228580824632" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.774130 4672 scope.go:117] "RemoveContainer" containerID="ce7a7e486044de9cef0757c05694a39f331e017ca86ae3981012256e0b58fb74" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.827178 4672 scope.go:117] "RemoveContainer" containerID="9e8c3b07eb4f9979b6ff8a8d1b44efa11edb03adfb18e7e22b48a3b8a0aab6b7" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.869632 4672 scope.go:117] "RemoveContainer" containerID="bd18ea38c2e6d705d39b63cbf64b2d7043480f4e99d32e92a6091f4966d691e2" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.942218 4672 scope.go:117] "RemoveContainer" 
containerID="8bafe4d85674d79b133b904440584276e23ffe5fdd37fe4308b14043556e5956" Oct 07 15:18:53 crc kubenswrapper[4672]: I1007 15:18:53.978156 4672 scope.go:117] "RemoveContainer" containerID="0b6b606be159db280bbd37b34ac9a9fda500280c4b1c629158985a516035c5de" Oct 07 15:19:00 crc kubenswrapper[4672]: I1007 15:19:00.059748 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kqgxx"] Oct 07 15:19:00 crc kubenswrapper[4672]: I1007 15:19:00.068700 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-kqgxx"] Oct 07 15:19:01 crc kubenswrapper[4672]: I1007 15:19:01.903662 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62307f94-c8e8-4781-a675-0951f6e1d797" path="/var/lib/kubelet/pods/62307f94-c8e8-4781-a675-0951f6e1d797/volumes" Oct 07 15:19:02 crc kubenswrapper[4672]: I1007 15:19:02.891184 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:19:02 crc kubenswrapper[4672]: E1007 15:19:02.891464 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:19:16 crc kubenswrapper[4672]: I1007 15:19:16.052639 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-2j8d8"] Oct 07 15:19:16 crc kubenswrapper[4672]: I1007 15:19:16.064254 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-2j8d8"] Oct 07 15:19:16 crc kubenswrapper[4672]: I1007 15:19:16.891381 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:19:16 crc kubenswrapper[4672]: E1007 15:19:16.891605 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:19:17 crc kubenswrapper[4672]: I1007 15:19:17.902129 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d702410-d569-45e2-9f92-b8e0c7a0fd9d" path="/var/lib/kubelet/pods/1d702410-d569-45e2-9f92-b8e0c7a0fd9d/volumes" Oct 07 15:19:23 crc kubenswrapper[4672]: I1007 15:19:23.029903 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-pkll5"] Oct 07 15:19:23 crc kubenswrapper[4672]: I1007 15:19:23.039761 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-pkll5"] Oct 07 15:19:23 crc kubenswrapper[4672]: I1007 15:19:23.905046 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="294a14e2-fc34-497e-b003-f82a21411703" path="/var/lib/kubelet/pods/294a14e2-fc34-497e-b003-f82a21411703/volumes" Oct 07 15:19:24 crc kubenswrapper[4672]: I1007 15:19:24.029964 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-89g5s"] Oct 07 15:19:24 crc kubenswrapper[4672]: I1007 15:19:24.041412 4672 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-cell0-db-create-mc8ns"] Oct 07 15:19:24 crc kubenswrapper[4672]: I1007 15:19:24.059233 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-89g5s"] Oct 07 15:19:24 crc kubenswrapper[4672]: I1007 15:19:24.060324 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-mc8ns"] Oct 07 15:19:25 crc kubenswrapper[4672]: I1007 15:19:25.902449 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="274cd1ef-76a6-45f3-ad79-690178d34d7b" path="/var/lib/kubelet/pods/274cd1ef-76a6-45f3-ad79-690178d34d7b/volumes" Oct 07 15:19:25 crc kubenswrapper[4672]: I1007 15:19:25.903395 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55b482fc-c341-4d8f-aec4-ab97e93573e8" path="/var/lib/kubelet/pods/55b482fc-c341-4d8f-aec4-ab97e93573e8/volumes" Oct 07 15:19:29 crc kubenswrapper[4672]: I1007 15:19:29.891710 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:19:31 crc kubenswrapper[4672]: I1007 15:19:31.172218 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4"} Oct 07 15:19:32 crc kubenswrapper[4672]: I1007 15:19:32.035741 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c54c-account-create-q95c7"] Oct 07 15:19:32 crc kubenswrapper[4672]: I1007 15:19:32.043261 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c54c-account-create-q95c7"] Oct 07 15:19:33 crc kubenswrapper[4672]: I1007 15:19:33.901682 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27bd3313-2b8f-4ae9-acc8-f88633249dd9" path="/var/lib/kubelet/pods/27bd3313-2b8f-4ae9-acc8-f88633249dd9/volumes" Oct 07 15:19:40 crc kubenswrapper[4672]: I1007 15:19:40.258268 4672 generic.go:334] "Generic (PLEG): container finished" podID="a9f7823a-974b-4ef1-9414-f1aac7bd2179" containerID="99e31954fb2000cc3ea64c022ccb416e784d880cab4a52cf7c6be0aec76ebab9" exitCode=0 Oct 07 15:19:40 crc kubenswrapper[4672]: I1007 15:19:40.258351 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" event={"ID":"a9f7823a-974b-4ef1-9414-f1aac7bd2179","Type":"ContainerDied","Data":"99e31954fb2000cc3ea64c022ccb416e784d880cab4a52cf7c6be0aec76ebab9"} Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.665873 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.805625 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory\") pod \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.805777 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l25k\" (UniqueName: \"kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k\") pod \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.805800 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key\") pod \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\" (UID: \"a9f7823a-974b-4ef1-9414-f1aac7bd2179\") " Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.814633 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k" (OuterVolumeSpecName: "kube-api-access-6l25k") pod "a9f7823a-974b-4ef1-9414-f1aac7bd2179" (UID: "a9f7823a-974b-4ef1-9414-f1aac7bd2179"). InnerVolumeSpecName "kube-api-access-6l25k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.835200 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a9f7823a-974b-4ef1-9414-f1aac7bd2179" (UID: "a9f7823a-974b-4ef1-9414-f1aac7bd2179"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.835841 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory" (OuterVolumeSpecName: "inventory") pod "a9f7823a-974b-4ef1-9414-f1aac7bd2179" (UID: "a9f7823a-974b-4ef1-9414-f1aac7bd2179"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.907305 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l25k\" (UniqueName: \"kubernetes.io/projected/a9f7823a-974b-4ef1-9414-f1aac7bd2179-kube-api-access-6l25k\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.907337 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:41 crc kubenswrapper[4672]: I1007 15:19:41.907350 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9f7823a-974b-4ef1-9414-f1aac7bd2179-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.275608 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" event={"ID":"a9f7823a-974b-4ef1-9414-f1aac7bd2179","Type":"ContainerDied","Data":"2e3a97736fc05e55f284af3cd4cd7ee78b0e9d2ee92f1370911153bdbd0ee0bc"} Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.275734 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e3a97736fc05e55f284af3cd4cd7ee78b0e9d2ee92f1370911153bdbd0ee0bc" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.275789 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.358234 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv"] Oct 07 15:19:42 crc kubenswrapper[4672]: E1007 15:19:42.358630 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9f7823a-974b-4ef1-9414-f1aac7bd2179" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.358652 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9f7823a-974b-4ef1-9414-f1aac7bd2179" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.358849 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9f7823a-974b-4ef1-9414-f1aac7bd2179" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.359452 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.366243 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.366336 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.366433 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.366592 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.374256 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv"] Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.518326 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.518544 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.518650 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb252\" (UniqueName: \"kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.620642 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.620753 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.620801 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb252\" (UniqueName: \"kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.624510 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.627647 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.636692 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb252\" (UniqueName: \"kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:42 crc kubenswrapper[4672]: I1007 15:19:42.678147 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.031076 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cf8d-account-create-lfxbd"] Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.041353 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-b437-account-create-pvlh9"] Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.047418 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cf8d-account-create-lfxbd"] Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.053228 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-b437-account-create-pvlh9"] Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.203455 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv"] Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.285360 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" event={"ID":"6b18bc85-3a86-4989-a2b2-dd34d127023a","Type":"ContainerStarted","Data":"b4f303362ac674d4d4261406cefe0bb7b5096078b5dabc2ec4a816284c11e4c3"} Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.903431 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e1ba3cb-b673-4203-8790-2561949aa72f" path="/var/lib/kubelet/pods/1e1ba3cb-b673-4203-8790-2561949aa72f/volumes" Oct 07 15:19:43 crc kubenswrapper[4672]: I1007 15:19:43.904513 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc280071-0df1-46dd-8d72-c48efe297e6c" path="/var/lib/kubelet/pods/bc280071-0df1-46dd-8d72-c48efe297e6c/volumes" Oct 07 15:19:44 crc kubenswrapper[4672]: I1007 15:19:44.298133 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" event={"ID":"6b18bc85-3a86-4989-a2b2-dd34d127023a","Type":"ContainerStarted","Data":"115c9cc5e3d91fefe4e3eeb6cf9ece7bfad2ec32852662aced3a915f79d167f4"} Oct 07 15:19:44 crc kubenswrapper[4672]: I1007 15:19:44.320081 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" podStartSLOduration=1.813426651 podStartE2EDuration="2.32006131s" podCreationTimestamp="2025-10-07 15:19:42 +0000 UTC" firstStartedPulling="2025-10-07 15:19:43.209848688 +0000 UTC m=+1860.185027269" lastFinishedPulling="2025-10-07 15:19:43.716483347 +0000 UTC m=+1860.691661928" observedRunningTime="2025-10-07 15:19:44.313932701 +0000 UTC m=+1861.289111272" watchObservedRunningTime="2025-10-07 15:19:44.32006131 +0000 UTC m=+1861.295239891" Oct 07 15:19:49 crc kubenswrapper[4672]: I1007 15:19:49.334963 4672 generic.go:334] "Generic (PLEG): container finished" podID="6b18bc85-3a86-4989-a2b2-dd34d127023a" containerID="115c9cc5e3d91fefe4e3eeb6cf9ece7bfad2ec32852662aced3a915f79d167f4" exitCode=0 Oct 07 15:19:49 crc kubenswrapper[4672]: I1007 15:19:49.335457 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" event={"ID":"6b18bc85-3a86-4989-a2b2-dd34d127023a","Type":"ContainerDied","Data":"115c9cc5e3d91fefe4e3eeb6cf9ece7bfad2ec32852662aced3a915f79d167f4"} Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.716536 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.876675 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key\") pod \"6b18bc85-3a86-4989-a2b2-dd34d127023a\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.876764 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb252\" (UniqueName: \"kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252\") pod \"6b18bc85-3a86-4989-a2b2-dd34d127023a\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.876929 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory\") pod \"6b18bc85-3a86-4989-a2b2-dd34d127023a\" (UID: \"6b18bc85-3a86-4989-a2b2-dd34d127023a\") " Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.881978 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252" (OuterVolumeSpecName: "kube-api-access-cb252") pod "6b18bc85-3a86-4989-a2b2-dd34d127023a" (UID: "6b18bc85-3a86-4989-a2b2-dd34d127023a"). InnerVolumeSpecName "kube-api-access-cb252". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.904247 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6b18bc85-3a86-4989-a2b2-dd34d127023a" (UID: "6b18bc85-3a86-4989-a2b2-dd34d127023a"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.904261 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory" (OuterVolumeSpecName: "inventory") pod "6b18bc85-3a86-4989-a2b2-dd34d127023a" (UID: "6b18bc85-3a86-4989-a2b2-dd34d127023a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.978772 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.978802 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b18bc85-3a86-4989-a2b2-dd34d127023a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:50 crc kubenswrapper[4672]: I1007 15:19:50.978811 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb252\" (UniqueName: \"kubernetes.io/projected/6b18bc85-3a86-4989-a2b2-dd34d127023a-kube-api-access-cb252\") on node \"crc\" DevicePath \"\"" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.353406 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" event={"ID":"6b18bc85-3a86-4989-a2b2-dd34d127023a","Type":"ContainerDied","Data":"b4f303362ac674d4d4261406cefe0bb7b5096078b5dabc2ec4a816284c11e4c3"} Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.353736 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4f303362ac674d4d4261406cefe0bb7b5096078b5dabc2ec4a816284c11e4c3" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.353481 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.417350 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79"] Oct 07 15:19:51 crc kubenswrapper[4672]: E1007 15:19:51.417723 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b18bc85-3a86-4989-a2b2-dd34d127023a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.417743 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b18bc85-3a86-4989-a2b2-dd34d127023a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.417939 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b18bc85-3a86-4989-a2b2-dd34d127023a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.418616 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.420391 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.420677 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.420719 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.420799 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.437765 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79"] Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.589884 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.590100 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62p52\" (UniqueName: \"kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.590142 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.692673 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62p52\" (UniqueName: \"kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.692761 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.692835 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: 
\"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.698302 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.699868 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.713789 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62p52\" (UniqueName: \"kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-x9l79\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:51 crc kubenswrapper[4672]: I1007 15:19:51.742144 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:19:52 crc kubenswrapper[4672]: I1007 15:19:52.334998 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79"] Oct 07 15:19:52 crc kubenswrapper[4672]: I1007 15:19:52.362409 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" event={"ID":"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08","Type":"ContainerStarted","Data":"1f4c5af6e54f43c2c621401f03e3f1e96e40bcc4539e5e3668b9f1fefa8ee517"} Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.133914 4672 scope.go:117] "RemoveContainer" containerID="865df58d2dda1bec8ab7581bccb70148613e8ede07d0b4ad84846053d02c4bec" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.156389 4672 scope.go:117] "RemoveContainer" containerID="9c1b7312a361c94fb586737ef34e50131f7efa6f2d21992ce511ca29c9aa14f9" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.223295 4672 scope.go:117] "RemoveContainer" containerID="db12a9de8539ba1a88463b35d19faa9c4c6aa8d220bae12a7f2cee228bdfbadc" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.297310 4672 scope.go:117] "RemoveContainer" containerID="e65a67703af4d8814c91d9faab26e202d32eb34eaac035fb55fbafa0fb73452f" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.355144 4672 scope.go:117] "RemoveContainer" containerID="04cad2e1ddba4e4df454bea7f0831d97dd98275d849a79038c71da6c197da5df" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.378410 4672 scope.go:117] "RemoveContainer" containerID="0f069ee60985b3831f377ec71cfc25ca7d4608545cd43bb174713b71cf0108f7" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.379919 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" event={"ID":"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08","Type":"ContainerStarted","Data":"5fee830bf8d06ce71a4f15997f7effc2bd58f5ae398fcd78ed563f0d4cdce910"} Oct 07 15:19:54 crc 
kubenswrapper[4672]: I1007 15:19:54.410430 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" podStartSLOduration=2.580307496 podStartE2EDuration="3.410406595s" podCreationTimestamp="2025-10-07 15:19:51 +0000 UTC" firstStartedPulling="2025-10-07 15:19:52.339245235 +0000 UTC m=+1869.314423816" lastFinishedPulling="2025-10-07 15:19:53.169344334 +0000 UTC m=+1870.144522915" observedRunningTime="2025-10-07 15:19:54.400212127 +0000 UTC m=+1871.375390708" watchObservedRunningTime="2025-10-07 15:19:54.410406595 +0000 UTC m=+1871.385585176" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.419885 4672 scope.go:117] "RemoveContainer" containerID="566b8976a2d8ea8c854f6fd364fd9630aee1fc49d6ab2d9adfeba86d7df1a873" Oct 07 15:19:54 crc kubenswrapper[4672]: I1007 15:19:54.470495 4672 scope.go:117] "RemoveContainer" containerID="f39292180daf5766d19b71f5347c6d0fd06fbbc9e90f22bd63326b920d279326" Oct 07 15:20:05 crc kubenswrapper[4672]: I1007 15:20:05.042249 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-pz2ts"] Oct 07 15:20:05 crc kubenswrapper[4672]: I1007 15:20:05.050728 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-pz2ts"] Oct 07 15:20:05 crc kubenswrapper[4672]: I1007 15:20:05.902039 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a5bc8d8-deb5-46de-a6f1-c90f36e71104" path="/var/lib/kubelet/pods/2a5bc8d8-deb5-46de-a6f1-c90f36e71104/volumes" Oct 07 15:20:28 crc kubenswrapper[4672]: I1007 15:20:28.036540 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-fc94m"] Oct 07 15:20:28 crc kubenswrapper[4672]: I1007 15:20:28.043696 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-fc94m"] Oct 07 15:20:29 crc kubenswrapper[4672]: I1007 15:20:29.681919 4672 generic.go:334] "Generic (PLEG): container finished" podID="3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" containerID="5fee830bf8d06ce71a4f15997f7effc2bd58f5ae398fcd78ed563f0d4cdce910" exitCode=0 Oct 07 15:20:29 crc kubenswrapper[4672]: I1007 15:20:29.682007 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" event={"ID":"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08","Type":"ContainerDied","Data":"5fee830bf8d06ce71a4f15997f7effc2bd58f5ae398fcd78ed563f0d4cdce910"} Oct 07 15:20:29 crc kubenswrapper[4672]: I1007 15:20:29.905632 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12eb9041-9bc5-40ec-8d6e-8c2177b8acda" path="/var/lib/kubelet/pods/12eb9041-9bc5-40ec-8d6e-8c2177b8acda/volumes" Oct 07 15:20:30 crc kubenswrapper[4672]: I1007 15:20:30.027621 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5dl6k"] Oct 07 15:20:30 crc kubenswrapper[4672]: I1007 15:20:30.037999 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5dl6k"] Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.103106 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.264153 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key\") pod \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.264239 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62p52\" (UniqueName: \"kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52\") pod \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.264274 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory\") pod \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\" (UID: \"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08\") " Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.270785 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52" (OuterVolumeSpecName: "kube-api-access-62p52") pod "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" (UID: "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08"). InnerVolumeSpecName "kube-api-access-62p52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.296692 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" (UID: "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.305229 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory" (OuterVolumeSpecName: "inventory") pod "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" (UID: "3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.367842 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.367872 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62p52\" (UniqueName: \"kubernetes.io/projected/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-kube-api-access-62p52\") on node \"crc\" DevicePath \"\"" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.367883 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.698833 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" event={"ID":"3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08","Type":"ContainerDied","Data":"1f4c5af6e54f43c2c621401f03e3f1e96e40bcc4539e5e3668b9f1fefa8ee517"} Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.698884 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f4c5af6e54f43c2c621401f03e3f1e96e40bcc4539e5e3668b9f1fefa8ee517" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.698891 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-x9l79" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.780213 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz"] Oct 07 15:20:31 crc kubenswrapper[4672]: E1007 15:20:31.780839 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.780907 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.781182 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.782000 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.784363 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.784630 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.784677 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.784990 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.791590 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz"] Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.876732 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.876833 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8vwg\" (UniqueName: \"kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.876853 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.903362 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e50c0a4-dded-4ae4-9b1d-a3e06c43c529" path="/var/lib/kubelet/pods/7e50c0a4-dded-4ae4-9b1d-a3e06c43c529/volumes" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.978596 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.978699 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8vwg\" (UniqueName: \"kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 
15:20:31.978726 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.984141 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.984431 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:31 crc kubenswrapper[4672]: I1007 15:20:31.998150 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8vwg\" (UniqueName: \"kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-df6lz\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:32 crc kubenswrapper[4672]: I1007 15:20:32.105950 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:20:32 crc kubenswrapper[4672]: I1007 15:20:32.572696 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz"] Oct 07 15:20:32 crc kubenswrapper[4672]: I1007 15:20:32.707898 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" event={"ID":"fa45d7c4-d068-4df1-847b-589c7061b6e1","Type":"ContainerStarted","Data":"86a09c763f205342ef27f63c997ebda1e7310600e4b1a5894d01ffe3a6c4f0e2"} Oct 07 15:20:33 crc kubenswrapper[4672]: I1007 15:20:33.722602 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" event={"ID":"fa45d7c4-d068-4df1-847b-589c7061b6e1","Type":"ContainerStarted","Data":"965eed4fcf86310bb4920012cd9e2b578aae05795b750d7c0d57ac93b5c0a49f"} Oct 07 15:20:33 crc kubenswrapper[4672]: I1007 15:20:33.748170 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" podStartSLOduration=1.9853960449999999 podStartE2EDuration="2.748149106s" podCreationTimestamp="2025-10-07 15:20:31 +0000 UTC" firstStartedPulling="2025-10-07 15:20:32.580923302 +0000 UTC m=+1909.556101883" lastFinishedPulling="2025-10-07 15:20:33.343676363 +0000 UTC m=+1910.318854944" observedRunningTime="2025-10-07 15:20:33.740530432 +0000 UTC m=+1910.715709023" watchObservedRunningTime="2025-10-07 15:20:33.748149106 +0000 UTC m=+1910.723327687" Oct 07 15:20:50 crc kubenswrapper[4672]: I1007 15:20:50.040896 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-cell-mapping-gj4vc"] Oct 07 15:20:50 crc kubenswrapper[4672]: I1007 15:20:50.047612 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-gj4vc"] Oct 07 15:20:51 crc kubenswrapper[4672]: I1007 15:20:51.904919 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13223694-99fa-41ce-bc04-7b5387189b6d" path="/var/lib/kubelet/pods/13223694-99fa-41ce-bc04-7b5387189b6d/volumes" Oct 07 15:20:54 crc kubenswrapper[4672]: I1007 15:20:54.654781 4672 scope.go:117] "RemoveContainer" containerID="2007561754c1ecf46513b00e33e6a46c5635c673725dd1dfd0924024b25956a3" Oct 07 15:20:54 crc kubenswrapper[4672]: I1007 15:20:54.698476 4672 scope.go:117] "RemoveContainer" containerID="318d792eaadc0dd492cfcfe638afb2223686e220ff0f1447ec875606ac16874a" Oct 07 15:20:54 crc kubenswrapper[4672]: I1007 15:20:54.741754 4672 scope.go:117] "RemoveContainer" containerID="29fddc08f4c8cab4953684cf0883adb40aefb2e1a486541fa433e12e0049243c" Oct 07 15:20:54 crc kubenswrapper[4672]: I1007 15:20:54.801058 4672 scope.go:117] "RemoveContainer" containerID="59c0137c856310ab049e75be224dbcee133c57eceaa4a3127dec7b142bf296e1" Oct 07 15:21:27 crc kubenswrapper[4672]: I1007 15:21:27.178768 4672 generic.go:334] "Generic (PLEG): container finished" podID="fa45d7c4-d068-4df1-847b-589c7061b6e1" containerID="965eed4fcf86310bb4920012cd9e2b578aae05795b750d7c0d57ac93b5c0a49f" exitCode=2 Oct 07 15:21:27 crc kubenswrapper[4672]: I1007 15:21:27.178829 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" event={"ID":"fa45d7c4-d068-4df1-847b-589c7061b6e1","Type":"ContainerDied","Data":"965eed4fcf86310bb4920012cd9e2b578aae05795b750d7c0d57ac93b5c0a49f"} Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.614939 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.780337 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key\") pod \"fa45d7c4-d068-4df1-847b-589c7061b6e1\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.780403 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") pod \"fa45d7c4-d068-4df1-847b-589c7061b6e1\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.780535 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8vwg\" (UniqueName: \"kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg\") pod \"fa45d7c4-d068-4df1-847b-589c7061b6e1\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.785927 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg" (OuterVolumeSpecName: "kube-api-access-v8vwg") pod "fa45d7c4-d068-4df1-847b-589c7061b6e1" (UID: "fa45d7c4-d068-4df1-847b-589c7061b6e1"). InnerVolumeSpecName "kube-api-access-v8vwg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:21:28 crc kubenswrapper[4672]: E1007 15:21:28.803207 4672 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory podName:fa45d7c4-d068-4df1-847b-589c7061b6e1 nodeName:}" failed. No retries permitted until 2025-10-07 15:21:29.303184293 +0000 UTC m=+1966.278362874 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory") pod "fa45d7c4-d068-4df1-847b-589c7061b6e1" (UID: "fa45d7c4-d068-4df1-847b-589c7061b6e1") : error deleting /var/lib/kubelet/pods/fa45d7c4-d068-4df1-847b-589c7061b6e1/volume-subpaths: remove /var/lib/kubelet/pods/fa45d7c4-d068-4df1-847b-589c7061b6e1/volume-subpaths: no such file or directory Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.806195 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa45d7c4-d068-4df1-847b-589c7061b6e1" (UID: "fa45d7c4-d068-4df1-847b-589c7061b6e1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.882897 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:21:28 crc kubenswrapper[4672]: I1007 15:21:28.882928 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8vwg\" (UniqueName: \"kubernetes.io/projected/fa45d7c4-d068-4df1-847b-589c7061b6e1-kube-api-access-v8vwg\") on node \"crc\" DevicePath \"\"" Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.198264 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" event={"ID":"fa45d7c4-d068-4df1-847b-589c7061b6e1","Type":"ContainerDied","Data":"86a09c763f205342ef27f63c997ebda1e7310600e4b1a5894d01ffe3a6c4f0e2"} Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.198303 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86a09c763f205342ef27f63c997ebda1e7310600e4b1a5894d01ffe3a6c4f0e2" Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.198785 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-df6lz" Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.392105 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") pod \"fa45d7c4-d068-4df1-847b-589c7061b6e1\" (UID: \"fa45d7c4-d068-4df1-847b-589c7061b6e1\") " Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.394970 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory" (OuterVolumeSpecName: "inventory") pod "fa45d7c4-d068-4df1-847b-589c7061b6e1" (UID: "fa45d7c4-d068-4df1-847b-589c7061b6e1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:21:29 crc kubenswrapper[4672]: I1007 15:21:29.494811 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa45d7c4-d068-4df1-847b-589c7061b6e1-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.026415 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj"] Oct 07 15:21:37 crc kubenswrapper[4672]: E1007 15:21:37.027605 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa45d7c4-d068-4df1-847b-589c7061b6e1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.027623 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa45d7c4-d068-4df1-847b-589c7061b6e1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.027869 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa45d7c4-d068-4df1-847b-589c7061b6e1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.028656 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.032214 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.032514 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.032660 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.033255 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.037367 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj"] Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.142446 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whzzd\" (UniqueName: \"kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.142495 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.142598 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.244746 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whzzd\" (UniqueName: \"kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.244793 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.244844 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.251397 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.252629 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.261333 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whzzd\" (UniqueName: \"kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.363233 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:21:37 crc kubenswrapper[4672]: I1007 15:21:37.846766 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj"] Oct 07 15:21:38 crc kubenswrapper[4672]: I1007 15:21:38.272565 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" event={"ID":"6d5fecb8-bc89-44d0-9413-4ab72d34390a","Type":"ContainerStarted","Data":"74f1be228859edfc3ce7ee8c644fce0f60c02573ef46de62dd6fb29312120237"} Oct 07 15:21:39 crc kubenswrapper[4672]: I1007 15:21:39.281250 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" event={"ID":"6d5fecb8-bc89-44d0-9413-4ab72d34390a","Type":"ContainerStarted","Data":"19cb6a0056de8de86dc9981733b1c45c3034c4ed0f22145f929afadee9867400"} Oct 07 15:21:39 crc kubenswrapper[4672]: I1007 15:21:39.295720 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" podStartSLOduration=1.837001571 podStartE2EDuration="2.295697454s" podCreationTimestamp="2025-10-07 15:21:37 +0000 UTC" firstStartedPulling="2025-10-07 15:21:37.852353943 +0000 UTC m=+1974.827532524" lastFinishedPulling="2025-10-07 15:21:38.311049826 +0000 UTC m=+1975.286228407" observedRunningTime="2025-10-07 15:21:39.293009425 +0000 UTC m=+1976.268188006" watchObservedRunningTime="2025-10-07 15:21:39.295697454 +0000 UTC m=+1976.270876035" Oct 07 15:21:56 crc kubenswrapper[4672]: I1007 15:21:56.650743 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:21:56 crc kubenswrapper[4672]: I1007 15:21:56.651280 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:22:21 crc kubenswrapper[4672]: I1007 15:22:21.615438 4672 generic.go:334] "Generic (PLEG): container finished" podID="6d5fecb8-bc89-44d0-9413-4ab72d34390a" containerID="19cb6a0056de8de86dc9981733b1c45c3034c4ed0f22145f929afadee9867400" exitCode=0 Oct 07 15:22:21 crc kubenswrapper[4672]: I1007 15:22:21.615528 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" event={"ID":"6d5fecb8-bc89-44d0-9413-4ab72d34390a","Type":"ContainerDied","Data":"19cb6a0056de8de86dc9981733b1c45c3034c4ed0f22145f929afadee9867400"} Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.026480 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.174461 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whzzd\" (UniqueName: \"kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd\") pod \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.174594 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key\") pod \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.174674 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory\") pod \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\" (UID: \"6d5fecb8-bc89-44d0-9413-4ab72d34390a\") " Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.180301 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd" (OuterVolumeSpecName: "kube-api-access-whzzd") pod "6d5fecb8-bc89-44d0-9413-4ab72d34390a" (UID: "6d5fecb8-bc89-44d0-9413-4ab72d34390a"). InnerVolumeSpecName "kube-api-access-whzzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.200253 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory" (OuterVolumeSpecName: "inventory") pod "6d5fecb8-bc89-44d0-9413-4ab72d34390a" (UID: "6d5fecb8-bc89-44d0-9413-4ab72d34390a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.202346 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d5fecb8-bc89-44d0-9413-4ab72d34390a" (UID: "6d5fecb8-bc89-44d0-9413-4ab72d34390a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.277523 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whzzd\" (UniqueName: \"kubernetes.io/projected/6d5fecb8-bc89-44d0-9413-4ab72d34390a-kube-api-access-whzzd\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.277560 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.277570 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d5fecb8-bc89-44d0-9413-4ab72d34390a-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.632977 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" event={"ID":"6d5fecb8-bc89-44d0-9413-4ab72d34390a","Type":"ContainerDied","Data":"74f1be228859edfc3ce7ee8c644fce0f60c02573ef46de62dd6fb29312120237"} Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.633038 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74f1be228859edfc3ce7ee8c644fce0f60c02573ef46de62dd6fb29312120237" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.633088 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.711986 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-mslxh"] Oct 07 15:22:23 crc kubenswrapper[4672]: E1007 15:22:23.712436 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5fecb8-bc89-44d0-9413-4ab72d34390a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.712456 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5fecb8-bc89-44d0-9413-4ab72d34390a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.712624 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5fecb8-bc89-44d0-9413-4ab72d34390a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.713429 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.716441 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.716650 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.716690 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.717226 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.721530 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-mslxh"] Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.887737 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.887849 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.887895 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cznvx\" (UniqueName: \"kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.989124 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.989195 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cznvx\" (UniqueName: \"kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:23 crc kubenswrapper[4672]: I1007 15:22:23.989388 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:24 crc 
kubenswrapper[4672]: I1007 15:22:23.993734 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:24 crc kubenswrapper[4672]: I1007 15:22:24.001846 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:24 crc kubenswrapper[4672]: I1007 15:22:24.005816 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cznvx\" (UniqueName: \"kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx\") pod \"ssh-known-hosts-edpm-deployment-mslxh\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:24 crc kubenswrapper[4672]: I1007 15:22:24.043756 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:24 crc kubenswrapper[4672]: I1007 15:22:24.549713 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-mslxh"] Oct 07 15:22:24 crc kubenswrapper[4672]: I1007 15:22:24.642192 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" event={"ID":"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f","Type":"ContainerStarted","Data":"35bb3568e0987cb12b993a9d73c1f5d844374081d571d98b34837664ddc20e7a"} Oct 07 15:22:25 crc kubenswrapper[4672]: I1007 15:22:25.653065 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" event={"ID":"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f","Type":"ContainerStarted","Data":"6839d7bc775fa5c220a0c42b2fd32341ce2a2b4d6bda4a3389e4d4c665a6da0f"} Oct 07 15:22:25 crc kubenswrapper[4672]: I1007 15:22:25.675195 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" podStartSLOduration=1.92405792 podStartE2EDuration="2.675168478s" podCreationTimestamp="2025-10-07 15:22:23 +0000 UTC" firstStartedPulling="2025-10-07 15:22:24.557390706 +0000 UTC m=+2021.532569287" lastFinishedPulling="2025-10-07 15:22:25.308501264 +0000 UTC m=+2022.283679845" observedRunningTime="2025-10-07 15:22:25.666122283 +0000 UTC m=+2022.641300864" watchObservedRunningTime="2025-10-07 15:22:25.675168478 +0000 UTC m=+2022.650347059" Oct 07 15:22:26 crc kubenswrapper[4672]: I1007 15:22:26.650670 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:22:26 crc kubenswrapper[4672]: I1007 15:22:26.650929 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Oct 07 15:22:32 crc kubenswrapper[4672]: I1007 15:22:32.709260 4672 generic.go:334] "Generic (PLEG): container finished" podID="b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" containerID="6839d7bc775fa5c220a0c42b2fd32341ce2a2b4d6bda4a3389e4d4c665a6da0f" exitCode=0 Oct 07 15:22:32 crc kubenswrapper[4672]: I1007 15:22:32.709302 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" event={"ID":"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f","Type":"ContainerDied","Data":"6839d7bc775fa5c220a0c42b2fd32341ce2a2b4d6bda4a3389e4d4c665a6da0f"} Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.064166 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.171367 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0\") pod \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.171728 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cznvx\" (UniqueName: \"kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx\") pod \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.171833 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam\") pod \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\" (UID: \"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f\") " Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.176892 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx" (OuterVolumeSpecName: "kube-api-access-cznvx") pod "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" (UID: "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f"). InnerVolumeSpecName "kube-api-access-cznvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.197522 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" (UID: "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.198512 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" (UID: "b69369f3-e53e-4aea-ab35-dcc1e9f3d56f"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.275271 4672 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.275728 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cznvx\" (UniqueName: \"kubernetes.io/projected/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-kube-api-access-cznvx\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.275817 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b69369f3-e53e-4aea-ab35-dcc1e9f3d56f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.725821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" event={"ID":"b69369f3-e53e-4aea-ab35-dcc1e9f3d56f","Type":"ContainerDied","Data":"35bb3568e0987cb12b993a9d73c1f5d844374081d571d98b34837664ddc20e7a"} Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.726239 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35bb3568e0987cb12b993a9d73c1f5d844374081d571d98b34837664ddc20e7a" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.725864 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-mslxh" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.818375 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp"] Oct 07 15:22:34 crc kubenswrapper[4672]: E1007 15:22:34.819148 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" containerName="ssh-known-hosts-edpm-deployment" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.819165 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" containerName="ssh-known-hosts-edpm-deployment" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.820429 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69369f3-e53e-4aea-ab35-dcc1e9f3d56f" containerName="ssh-known-hosts-edpm-deployment" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.861514 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.870114 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.870373 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.870501 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.870622 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.870908 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp"] Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.988195 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.988301 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29x7d\" (UniqueName: \"kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:34 crc kubenswrapper[4672]: I1007 15:22:34.988334 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.089731 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29x7d\" (UniqueName: \"kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.089784 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.089890 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.094451 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.101775 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.105591 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29x7d\" (UniqueName: \"kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mr4zp\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.185153 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.667075 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp"] Oct 07 15:22:35 crc kubenswrapper[4672]: I1007 15:22:35.737616 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" event={"ID":"414dfaec-a340-4080-b3e7-da4966078c60","Type":"ContainerStarted","Data":"f88c2caf7499f67637696637b479757d0d442cb5d8da8670a2896d5447346753"} Oct 07 15:22:36 crc kubenswrapper[4672]: I1007 15:22:36.746804 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" event={"ID":"414dfaec-a340-4080-b3e7-da4966078c60","Type":"ContainerStarted","Data":"59764e0a7bff2acc6751a0977ccea772a04b5c82dd219e0e56627ae3ce12d67d"} Oct 07 15:22:36 crc kubenswrapper[4672]: I1007 15:22:36.769972 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" podStartSLOduration=2.287904225 podStartE2EDuration="2.769943892s" podCreationTimestamp="2025-10-07 15:22:34 +0000 UTC" firstStartedPulling="2025-10-07 15:22:35.674645849 +0000 UTC m=+2032.649824430" lastFinishedPulling="2025-10-07 15:22:36.156685516 +0000 UTC m=+2033.131864097" observedRunningTime="2025-10-07 15:22:36.767546272 +0000 UTC m=+2033.742724853" watchObservedRunningTime="2025-10-07 15:22:36.769943892 +0000 UTC m=+2033.745122473" Oct 07 15:22:44 crc kubenswrapper[4672]: I1007 15:22:44.827331 4672 generic.go:334] "Generic (PLEG): container finished" podID="414dfaec-a340-4080-b3e7-da4966078c60" containerID="59764e0a7bff2acc6751a0977ccea772a04b5c82dd219e0e56627ae3ce12d67d" exitCode=0 Oct 07 15:22:44 crc kubenswrapper[4672]: I1007 15:22:44.827511 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" 
event={"ID":"414dfaec-a340-4080-b3e7-da4966078c60","Type":"ContainerDied","Data":"59764e0a7bff2acc6751a0977ccea772a04b5c82dd219e0e56627ae3ce12d67d"} Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.250549 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.403591 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory\") pod \"414dfaec-a340-4080-b3e7-da4966078c60\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.403748 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key\") pod \"414dfaec-a340-4080-b3e7-da4966078c60\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.403772 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29x7d\" (UniqueName: \"kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d\") pod \"414dfaec-a340-4080-b3e7-da4966078c60\" (UID: \"414dfaec-a340-4080-b3e7-da4966078c60\") " Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.410071 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d" (OuterVolumeSpecName: "kube-api-access-29x7d") pod "414dfaec-a340-4080-b3e7-da4966078c60" (UID: "414dfaec-a340-4080-b3e7-da4966078c60"). InnerVolumeSpecName "kube-api-access-29x7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.434067 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "414dfaec-a340-4080-b3e7-da4966078c60" (UID: "414dfaec-a340-4080-b3e7-da4966078c60"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.435451 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory" (OuterVolumeSpecName: "inventory") pod "414dfaec-a340-4080-b3e7-da4966078c60" (UID: "414dfaec-a340-4080-b3e7-da4966078c60"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.506142 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.506175 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29x7d\" (UniqueName: \"kubernetes.io/projected/414dfaec-a340-4080-b3e7-da4966078c60-kube-api-access-29x7d\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.506185 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/414dfaec-a340-4080-b3e7-da4966078c60-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.849449 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" event={"ID":"414dfaec-a340-4080-b3e7-da4966078c60","Type":"ContainerDied","Data":"f88c2caf7499f67637696637b479757d0d442cb5d8da8670a2896d5447346753"} Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.849499 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mr4zp" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.849507 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f88c2caf7499f67637696637b479757d0d442cb5d8da8670a2896d5447346753" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.913425 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f"] Oct 07 15:22:46 crc kubenswrapper[4672]: E1007 15:22:46.913815 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="414dfaec-a340-4080-b3e7-da4966078c60" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.913834 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="414dfaec-a340-4080-b3e7-da4966078c60" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.914060 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="414dfaec-a340-4080-b3e7-da4966078c60" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.914719 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.917854 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.918177 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.918187 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.918567 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:22:46 crc kubenswrapper[4672]: I1007 15:22:46.927483 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f"] Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.015100 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.015174 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w2mc\" (UniqueName: \"kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.015308 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.117240 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.117680 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w2mc\" (UniqueName: \"kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.117858 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: 
\"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.120824 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.122476 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.133944 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w2mc\" (UniqueName: \"kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.231191 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.754383 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f"] Oct 07 15:22:47 crc kubenswrapper[4672]: I1007 15:22:47.861952 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" event={"ID":"c1ded63b-448b-4e94-9c53-e87268f775ac","Type":"ContainerStarted","Data":"d9700f6ec3587986eecf4b70df050eb7d03f3c1391d587d19e0597c3eb9e30c3"} Oct 07 15:22:49 crc kubenswrapper[4672]: I1007 15:22:49.904505 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" event={"ID":"c1ded63b-448b-4e94-9c53-e87268f775ac","Type":"ContainerStarted","Data":"0d50690138c8132acacb934408e67f26baae32d537d09a6d139ef02c0caae7a4"} Oct 07 15:22:49 crc kubenswrapper[4672]: I1007 15:22:49.915768 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" podStartSLOduration=3.071964663 podStartE2EDuration="3.91574844s" podCreationTimestamp="2025-10-07 15:22:46 +0000 UTC" firstStartedPulling="2025-10-07 15:22:47.764775965 +0000 UTC m=+2044.739954546" lastFinishedPulling="2025-10-07 15:22:48.608559742 +0000 UTC m=+2045.583738323" observedRunningTime="2025-10-07 15:22:49.910790944 +0000 UTC m=+2046.885969535" watchObservedRunningTime="2025-10-07 15:22:49.91574844 +0000 UTC m=+2046.890927021" Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.651055 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.651620 4672 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.651673 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.652432 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.652561 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4" gracePeriod=600 Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.981470 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4" exitCode=0 Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.981528 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4"} Oct 07 15:22:56 crc kubenswrapper[4672]: I1007 15:22:56.981566 4672 scope.go:117] "RemoveContainer" containerID="c519e5246d9d14f2b377325e9875be5a6f9b0087986b91d92be6237d94fe5429" Oct 07 15:22:58 crc kubenswrapper[4672]: I1007 15:22:58.015465 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"} Oct 07 15:22:59 crc kubenswrapper[4672]: I1007 15:22:59.026399 4672 generic.go:334] "Generic (PLEG): container finished" podID="c1ded63b-448b-4e94-9c53-e87268f775ac" containerID="0d50690138c8132acacb934408e67f26baae32d537d09a6d139ef02c0caae7a4" exitCode=0 Oct 07 15:22:59 crc kubenswrapper[4672]: I1007 15:22:59.026504 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" event={"ID":"c1ded63b-448b-4e94-9c53-e87268f775ac","Type":"ContainerDied","Data":"0d50690138c8132acacb934408e67f26baae32d537d09a6d139ef02c0caae7a4"} Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.486721 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.585279 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory\") pod \"c1ded63b-448b-4e94-9c53-e87268f775ac\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.585481 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key\") pod \"c1ded63b-448b-4e94-9c53-e87268f775ac\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.585589 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w2mc\" (UniqueName: \"kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc\") pod \"c1ded63b-448b-4e94-9c53-e87268f775ac\" (UID: \"c1ded63b-448b-4e94-9c53-e87268f775ac\") " Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.592449 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc" (OuterVolumeSpecName: "kube-api-access-4w2mc") pod "c1ded63b-448b-4e94-9c53-e87268f775ac" (UID: "c1ded63b-448b-4e94-9c53-e87268f775ac"). InnerVolumeSpecName "kube-api-access-4w2mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.619135 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c1ded63b-448b-4e94-9c53-e87268f775ac" (UID: "c1ded63b-448b-4e94-9c53-e87268f775ac"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.621006 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory" (OuterVolumeSpecName: "inventory") pod "c1ded63b-448b-4e94-9c53-e87268f775ac" (UID: "c1ded63b-448b-4e94-9c53-e87268f775ac"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.687733 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.687788 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w2mc\" (UniqueName: \"kubernetes.io/projected/c1ded63b-448b-4e94-9c53-e87268f775ac-kube-api-access-4w2mc\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:00 crc kubenswrapper[4672]: I1007 15:23:00.687805 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ded63b-448b-4e94-9c53-e87268f775ac-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.048465 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" event={"ID":"c1ded63b-448b-4e94-9c53-e87268f775ac","Type":"ContainerDied","Data":"d9700f6ec3587986eecf4b70df050eb7d03f3c1391d587d19e0597c3eb9e30c3"} Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.048960 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9700f6ec3587986eecf4b70df050eb7d03f3c1391d587d19e0597c3eb9e30c3" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.048491 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.171221 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj"] Oct 07 15:23:01 crc kubenswrapper[4672]: E1007 15:23:01.171779 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ded63b-448b-4e94-9c53-e87268f775ac" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.171805 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ded63b-448b-4e94-9c53-e87268f775ac" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.172058 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ded63b-448b-4e94-9c53-e87268f775ac" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.172966 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.183602 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.183738 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.183799 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.183892 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.184117 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.183610 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.184275 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.184524 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.190198 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj"] Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.303784 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzlx8\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.303835 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.303858 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.303966 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304052 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304120 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304188 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304218 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304253 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304631 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304669 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304748 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304815 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.304839 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406022 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406084 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406122 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406173 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406199 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.406218 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407025 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407072 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407117 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407161 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407185 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407235 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzlx8\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: 
\"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407265 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.407288 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.412242 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.412940 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.413015 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.414327 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.417416 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.417559 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.418478 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.419831 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.420600 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.421284 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.427387 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.427418 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.427677 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.428943 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzlx8\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:01 crc kubenswrapper[4672]: I1007 15:23:01.500382 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:02 crc kubenswrapper[4672]: I1007 15:23:02.097985 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj"] Oct 07 15:23:03 crc kubenswrapper[4672]: I1007 15:23:03.083489 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" event={"ID":"be75457c-c14d-4827-850e-2619993cc1f6","Type":"ContainerStarted","Data":"845173e2de9876dee7cc492311568a42b31d83b9914e4c8471928f05bcafae69"} Oct 07 15:23:04 crc kubenswrapper[4672]: I1007 15:23:04.096765 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" event={"ID":"be75457c-c14d-4827-850e-2619993cc1f6","Type":"ContainerStarted","Data":"7589d8babb2ca5f8061f20d65f4300bc9f6d3ce5ea334c4f9dcc2c4034b360ec"} Oct 07 15:23:04 crc kubenswrapper[4672]: I1007 15:23:04.124379 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" podStartSLOduration=2.339192521 podStartE2EDuration="3.124354169s" podCreationTimestamp="2025-10-07 15:23:01 +0000 UTC" firstStartedPulling="2025-10-07 15:23:02.093678572 +0000 UTC m=+2059.068857153" lastFinishedPulling="2025-10-07 15:23:02.87884022 +0000 UTC m=+2059.854018801" observedRunningTime="2025-10-07 15:23:04.119159217 +0000 UTC m=+2061.094337798" watchObservedRunningTime="2025-10-07 15:23:04.124354169 +0000 UTC m=+2061.099532750" Oct 07 15:23:41 crc kubenswrapper[4672]: I1007 15:23:41.382528 4672 generic.go:334] "Generic (PLEG): container finished" podID="be75457c-c14d-4827-850e-2619993cc1f6" containerID="7589d8babb2ca5f8061f20d65f4300bc9f6d3ce5ea334c4f9dcc2c4034b360ec" exitCode=0 Oct 07 15:23:41 crc kubenswrapper[4672]: I1007 15:23:41.382600 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" event={"ID":"be75457c-c14d-4827-850e-2619993cc1f6","Type":"ContainerDied","Data":"7589d8babb2ca5f8061f20d65f4300bc9f6d3ce5ea334c4f9dcc2c4034b360ec"} Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.782212 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871512 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871573 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871613 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871647 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871688 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzlx8\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871717 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871784 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871817 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871850 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle\") pod 
\"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871884 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871929 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.871945 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.872030 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.872074 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory\") pod \"be75457c-c14d-4827-850e-2619993cc1f6\" (UID: \"be75457c-c14d-4827-850e-2619993cc1f6\") " Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.877727 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.878607 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.879364 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.879436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.879725 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8" (OuterVolumeSpecName: "kube-api-access-jzlx8") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "kube-api-access-jzlx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.879970 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.880389 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.881436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.882541 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.883156 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.883339 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.883381 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.902844 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory" (OuterVolumeSpecName: "inventory") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.908624 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "be75457c-c14d-4827-850e-2619993cc1f6" (UID: "be75457c-c14d-4827-850e-2619993cc1f6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974645 4672 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974682 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974693 4672 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974703 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974714 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzlx8\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-kube-api-access-jzlx8\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974723 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974731 4672 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974740 4672 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974748 4672 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974759 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/be75457c-c14d-4827-850e-2619993cc1f6-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974770 4672 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974781 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974791 4672 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:42 crc kubenswrapper[4672]: I1007 15:23:42.974803 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be75457c-c14d-4827-850e-2619993cc1f6-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.006863 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:43 crc kubenswrapper[4672]: E1007 15:23:43.007364 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75457c-c14d-4827-850e-2619993cc1f6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.007396 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75457c-c14d-4827-850e-2619993cc1f6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.007644 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be75457c-c14d-4827-850e-2619993cc1f6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.009316 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.017449 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.076669 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.076855 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.076921 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btzmk\" (UniqueName: \"kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.178590 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 
15:23:43.178709 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btzmk\" (UniqueName: \"kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.178781 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.179223 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.179223 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.196730 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btzmk\" (UniqueName: \"kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk\") pod \"redhat-operators-ptp45\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.327144 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.402233 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" event={"ID":"be75457c-c14d-4827-850e-2619993cc1f6","Type":"ContainerDied","Data":"845173e2de9876dee7cc492311568a42b31d83b9914e4c8471928f05bcafae69"} Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.402277 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="845173e2de9876dee7cc492311568a42b31d83b9914e4c8471928f05bcafae69" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.402343 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.561900 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9"] Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.566928 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.571476 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.571804 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.572770 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9"] Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.572874 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.572889 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.574115 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.687732 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.688002 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.688154 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.688227 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fmpq\" (UniqueName: \"kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.688298 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.790290 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.790353 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.790415 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.790435 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fmpq\" (UniqueName: \"kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.790451 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.791490 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.795666 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.797833 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.798041 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.811093 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.811447 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fmpq\" (UniqueName: \"kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-lngw9\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.897755 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:23:43 crc kubenswrapper[4672]: I1007 15:23:43.903449 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.413771 4672 generic.go:334] "Generic (PLEG): container finished" podID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerID="cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614" exitCode=0 Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.413890 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerDied","Data":"cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614"} Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.414316 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerStarted","Data":"85d5aa9dc463f0f6413df56cc6d42ab74b5670a9fb352544c3b5d1139e9caf1b"} Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.415710 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.472330 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9"] Oct 07 15:23:44 crc kubenswrapper[4672]: W1007 15:23:44.474214 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04f4d7c8_1cca_4233_9cc5_dfa205f89c49.slice/crio-5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b WatchSource:0}: Error finding container 5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b: Status 404 returned error can't find the container with id 5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b Oct 07 15:23:44 crc kubenswrapper[4672]: I1007 15:23:44.936259 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:23:45 crc kubenswrapper[4672]: I1007 15:23:45.423507 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" event={"ID":"04f4d7c8-1cca-4233-9cc5-dfa205f89c49","Type":"ContainerStarted","Data":"f88e908508e0c31b02d52dda43a8b85c8fa513c693027cd0f4fc6aa78fc53fd0"} Oct 07 15:23:45 crc kubenswrapper[4672]: I1007 15:23:45.423554 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" 
event={"ID":"04f4d7c8-1cca-4233-9cc5-dfa205f89c49","Type":"ContainerStarted","Data":"5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b"} Oct 07 15:23:45 crc kubenswrapper[4672]: I1007 15:23:45.442745 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" podStartSLOduration=1.986048654 podStartE2EDuration="2.442727798s" podCreationTimestamp="2025-10-07 15:23:43 +0000 UTC" firstStartedPulling="2025-10-07 15:23:44.476478249 +0000 UTC m=+2101.451656820" lastFinishedPulling="2025-10-07 15:23:44.933157383 +0000 UTC m=+2101.908335964" observedRunningTime="2025-10-07 15:23:45.441839392 +0000 UTC m=+2102.417017973" watchObservedRunningTime="2025-10-07 15:23:45.442727798 +0000 UTC m=+2102.417906379" Oct 07 15:23:46 crc kubenswrapper[4672]: I1007 15:23:46.435883 4672 generic.go:334] "Generic (PLEG): container finished" podID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerID="3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00" exitCode=0 Oct 07 15:23:46 crc kubenswrapper[4672]: I1007 15:23:46.435953 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerDied","Data":"3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00"} Oct 07 15:23:47 crc kubenswrapper[4672]: I1007 15:23:47.446027 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerStarted","Data":"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1"} Oct 07 15:23:47 crc kubenswrapper[4672]: I1007 15:23:47.465304 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ptp45" podStartSLOduration=2.82844468 podStartE2EDuration="5.465284376s" podCreationTimestamp="2025-10-07 15:23:42 +0000 UTC" firstStartedPulling="2025-10-07 15:23:44.415418808 +0000 UTC m=+2101.390597399" lastFinishedPulling="2025-10-07 15:23:47.052258524 +0000 UTC m=+2104.027437095" observedRunningTime="2025-10-07 15:23:47.463197305 +0000 UTC m=+2104.438375896" watchObservedRunningTime="2025-10-07 15:23:47.465284376 +0000 UTC m=+2104.440462957" Oct 07 15:23:53 crc kubenswrapper[4672]: I1007 15:23:53.328430 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:53 crc kubenswrapper[4672]: I1007 15:23:53.328878 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:53 crc kubenswrapper[4672]: I1007 15:23:53.375564 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:53 crc kubenswrapper[4672]: I1007 15:23:53.566374 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:53 crc kubenswrapper[4672]: I1007 15:23:53.615162 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:55 crc kubenswrapper[4672]: I1007 15:23:55.515844 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ptp45" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="registry-server" 
containerID="cri-o://d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1" gracePeriod=2 Oct 07 15:23:55 crc kubenswrapper[4672]: I1007 15:23:55.950112 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.033959 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities\") pod \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.034178 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btzmk\" (UniqueName: \"kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk\") pod \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.034280 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content\") pod \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\" (UID: \"864d00b7-ad29-49a1-9667-6ca3ec16b8d2\") " Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.035315 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities" (OuterVolumeSpecName: "utilities") pod "864d00b7-ad29-49a1-9667-6ca3ec16b8d2" (UID: "864d00b7-ad29-49a1-9667-6ca3ec16b8d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.040536 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk" (OuterVolumeSpecName: "kube-api-access-btzmk") pod "864d00b7-ad29-49a1-9667-6ca3ec16b8d2" (UID: "864d00b7-ad29-49a1-9667-6ca3ec16b8d2"). InnerVolumeSpecName "kube-api-access-btzmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.136346 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btzmk\" (UniqueName: \"kubernetes.io/projected/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-kube-api-access-btzmk\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.136386 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.526658 4672 generic.go:334] "Generic (PLEG): container finished" podID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerID="d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1" exitCode=0 Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.526973 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerDied","Data":"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1"} Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.527004 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ptp45" event={"ID":"864d00b7-ad29-49a1-9667-6ca3ec16b8d2","Type":"ContainerDied","Data":"85d5aa9dc463f0f6413df56cc6d42ab74b5670a9fb352544c3b5d1139e9caf1b"} Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.527066 4672 scope.go:117] "RemoveContainer" containerID="d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.527222 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ptp45" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.549512 4672 scope.go:117] "RemoveContainer" containerID="3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.601129 4672 scope.go:117] "RemoveContainer" containerID="cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.640121 4672 scope.go:117] "RemoveContainer" containerID="d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1" Oct 07 15:23:56 crc kubenswrapper[4672]: E1007 15:23:56.640491 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1\": container with ID starting with d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1 not found: ID does not exist" containerID="d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.640542 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1"} err="failed to get container status \"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1\": rpc error: code = NotFound desc = could not find container \"d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1\": container with ID starting with d39532b5dfc7b2868a6cb57ae3599244a43c60fe4ed7604e2b8846cdf35e3ff1 not found: ID does not exist" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.640568 4672 scope.go:117] "RemoveContainer" containerID="3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00" Oct 07 15:23:56 crc kubenswrapper[4672]: E1007 15:23:56.641105 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00\": container with ID starting with 3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00 not found: ID does not exist" containerID="3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.641135 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00"} err="failed to get container status \"3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00\": rpc error: code = NotFound desc = could not find container \"3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00\": container with ID starting with 3453ccca58f006043815f26f24976ec23de53a615e4b0f28a956a8c708edba00 not found: ID does not exist" Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.641157 4672 scope.go:117] "RemoveContainer" containerID="cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614" Oct 07 15:23:56 crc kubenswrapper[4672]: E1007 15:23:56.641458 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614\": container with ID starting with cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614 not found: ID does not exist" containerID="cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614" 
Oct 07 15:23:56 crc kubenswrapper[4672]: I1007 15:23:56.641482 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614"} err="failed to get container status \"cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614\": rpc error: code = NotFound desc = could not find container \"cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614\": container with ID starting with cddc0493ab7df89760f50df8d21b085b90098343417cd6a02872be425331f614 not found: ID does not exist" Oct 07 15:23:57 crc kubenswrapper[4672]: I1007 15:23:57.332078 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "864d00b7-ad29-49a1-9667-6ca3ec16b8d2" (UID: "864d00b7-ad29-49a1-9667-6ca3ec16b8d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:23:57 crc kubenswrapper[4672]: I1007 15:23:57.356145 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/864d00b7-ad29-49a1-9667-6ca3ec16b8d2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:23:57 crc kubenswrapper[4672]: I1007 15:23:57.457527 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:57 crc kubenswrapper[4672]: I1007 15:23:57.465100 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ptp45"] Oct 07 15:23:57 crc kubenswrapper[4672]: I1007 15:23:57.903041 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" path="/var/lib/kubelet/pods/864d00b7-ad29-49a1-9667-6ca3ec16b8d2/volumes" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.483501 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:21 crc kubenswrapper[4672]: E1007 15:24:21.484671 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="extract-utilities" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.484690 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="extract-utilities" Oct 07 15:24:21 crc kubenswrapper[4672]: E1007 15:24:21.484710 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="extract-content" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.484718 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="extract-content" Oct 07 15:24:21 crc kubenswrapper[4672]: E1007 15:24:21.484774 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="registry-server" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.484782 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="registry-server" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.484989 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="864d00b7-ad29-49a1-9667-6ca3ec16b8d2" containerName="registry-server" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.487002 4672 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.497162 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.690031 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.690111 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.690185 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mgxj\" (UniqueName: \"kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.791620 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.791736 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.791861 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mgxj\" (UniqueName: \"kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.792593 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.792639 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:21 crc kubenswrapper[4672]: I1007 15:24:21.812182 4672 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mgxj\" (UniqueName: \"kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj\") pod \"redhat-marketplace-wzkxf\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:22 crc kubenswrapper[4672]: I1007 15:24:22.107042 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:22 crc kubenswrapper[4672]: I1007 15:24:22.518717 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:22 crc kubenswrapper[4672]: I1007 15:24:22.761660 4672 generic.go:334] "Generic (PLEG): container finished" podID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerID="ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803" exitCode=0 Oct 07 15:24:22 crc kubenswrapper[4672]: I1007 15:24:22.761706 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerDied","Data":"ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803"} Oct 07 15:24:22 crc kubenswrapper[4672]: I1007 15:24:22.761737 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerStarted","Data":"a5b79bea18043e1080bf7df6b2461ecdc3e4ffc0245ec0a179ffe3e6b754a359"} Oct 07 15:24:24 crc kubenswrapper[4672]: I1007 15:24:24.779936 4672 generic.go:334] "Generic (PLEG): container finished" podID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerID="5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01" exitCode=0 Oct 07 15:24:24 crc kubenswrapper[4672]: I1007 15:24:24.780255 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerDied","Data":"5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01"} Oct 07 15:24:25 crc kubenswrapper[4672]: I1007 15:24:25.790940 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerStarted","Data":"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428"} Oct 07 15:24:25 crc kubenswrapper[4672]: I1007 15:24:25.811528 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzkxf" podStartSLOduration=2.399745301 podStartE2EDuration="4.811507865s" podCreationTimestamp="2025-10-07 15:24:21 +0000 UTC" firstStartedPulling="2025-10-07 15:24:22.76334943 +0000 UTC m=+2139.738528011" lastFinishedPulling="2025-10-07 15:24:25.175111994 +0000 UTC m=+2142.150290575" observedRunningTime="2025-10-07 15:24:25.808113096 +0000 UTC m=+2142.783291697" watchObservedRunningTime="2025-10-07 15:24:25.811507865 +0000 UTC m=+2142.786686436" Oct 07 15:24:32 crc kubenswrapper[4672]: I1007 15:24:32.107118 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:32 crc kubenswrapper[4672]: I1007 15:24:32.107611 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:32 crc kubenswrapper[4672]: I1007 
15:24:32.153700 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:32 crc kubenswrapper[4672]: I1007 15:24:32.885357 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:32 crc kubenswrapper[4672]: I1007 15:24:32.931864 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:34 crc kubenswrapper[4672]: I1007 15:24:34.857272 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wzkxf" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="registry-server" containerID="cri-o://b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428" gracePeriod=2 Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.269314 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.355049 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content\") pod \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.355124 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mgxj\" (UniqueName: \"kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj\") pod \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.355165 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities\") pod \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\" (UID: \"927e330d-8a2c-44c2-ac03-b5fa90ca3bda\") " Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.356168 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities" (OuterVolumeSpecName: "utilities") pod "927e330d-8a2c-44c2-ac03-b5fa90ca3bda" (UID: "927e330d-8a2c-44c2-ac03-b5fa90ca3bda"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.361178 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj" (OuterVolumeSpecName: "kube-api-access-7mgxj") pod "927e330d-8a2c-44c2-ac03-b5fa90ca3bda" (UID: "927e330d-8a2c-44c2-ac03-b5fa90ca3bda"). InnerVolumeSpecName "kube-api-access-7mgxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.368583 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "927e330d-8a2c-44c2-ac03-b5fa90ca3bda" (UID: "927e330d-8a2c-44c2-ac03-b5fa90ca3bda"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.456983 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.457304 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mgxj\" (UniqueName: \"kubernetes.io/projected/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-kube-api-access-7mgxj\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.457321 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927e330d-8a2c-44c2-ac03-b5fa90ca3bda-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.867957 4672 generic.go:334] "Generic (PLEG): container finished" podID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerID="b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428" exitCode=0 Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.868007 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerDied","Data":"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428"} Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.868101 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzkxf" event={"ID":"927e330d-8a2c-44c2-ac03-b5fa90ca3bda","Type":"ContainerDied","Data":"a5b79bea18043e1080bf7df6b2461ecdc3e4ffc0245ec0a179ffe3e6b754a359"} Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.868124 4672 scope.go:117] "RemoveContainer" containerID="b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.869099 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzkxf" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.891920 4672 scope.go:117] "RemoveContainer" containerID="5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.904965 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.908781 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzkxf"] Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.923743 4672 scope.go:117] "RemoveContainer" containerID="ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.962362 4672 scope.go:117] "RemoveContainer" containerID="b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428" Oct 07 15:24:35 crc kubenswrapper[4672]: E1007 15:24:35.962924 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428\": container with ID starting with b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428 not found: ID does not exist" containerID="b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.962985 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428"} err="failed to get container status \"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428\": rpc error: code = NotFound desc = could not find container \"b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428\": container with ID starting with b90858f934e66b8b2ab099469b0b792fd55bcb67322bc4ab83c02cabe3dea428 not found: ID does not exist" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.963203 4672 scope.go:117] "RemoveContainer" containerID="5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01" Oct 07 15:24:35 crc kubenswrapper[4672]: E1007 15:24:35.964238 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01\": container with ID starting with 5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01 not found: ID does not exist" containerID="5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.964291 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01"} err="failed to get container status \"5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01\": rpc error: code = NotFound desc = could not find container \"5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01\": container with ID starting with 5673043a3e4ac0dbe5cfd77284a95592868218667e9fe9ce12b0bf3e6c0f8e01 not found: ID does not exist" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.964321 4672 scope.go:117] "RemoveContainer" containerID="ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803" Oct 07 15:24:35 crc kubenswrapper[4672]: E1007 15:24:35.964642 4672 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803\": container with ID starting with ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803 not found: ID does not exist" containerID="ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803" Oct 07 15:24:35 crc kubenswrapper[4672]: I1007 15:24:35.964675 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803"} err="failed to get container status \"ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803\": rpc error: code = NotFound desc = could not find container \"ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803\": container with ID starting with ec49c808247335e8167206995aa63802326ca2eaba30a7f8cc0d1dda1b84d803 not found: ID does not exist" Oct 07 15:24:37 crc kubenswrapper[4672]: I1007 15:24:37.902468 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" path="/var/lib/kubelet/pods/927e330d-8a2c-44c2-ac03-b5fa90ca3bda/volumes" Oct 07 15:24:45 crc kubenswrapper[4672]: I1007 15:24:45.969142 4672 generic.go:334] "Generic (PLEG): container finished" podID="04f4d7c8-1cca-4233-9cc5-dfa205f89c49" containerID="f88e908508e0c31b02d52dda43a8b85c8fa513c693027cd0f4fc6aa78fc53fd0" exitCode=0 Oct 07 15:24:45 crc kubenswrapper[4672]: I1007 15:24:45.969264 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" event={"ID":"04f4d7c8-1cca-4233-9cc5-dfa205f89c49","Type":"ContainerDied","Data":"f88e908508e0c31b02d52dda43a8b85c8fa513c693027cd0f4fc6aa78fc53fd0"} Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.453670 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.583798 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle\") pod \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.584173 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0\") pod \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.584507 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory\") pod \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.584609 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fmpq\" (UniqueName: \"kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq\") pod \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.584639 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key\") pod \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\" (UID: \"04f4d7c8-1cca-4233-9cc5-dfa205f89c49\") " Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.590890 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "04f4d7c8-1cca-4233-9cc5-dfa205f89c49" (UID: "04f4d7c8-1cca-4233-9cc5-dfa205f89c49"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.591675 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq" (OuterVolumeSpecName: "kube-api-access-5fmpq") pod "04f4d7c8-1cca-4233-9cc5-dfa205f89c49" (UID: "04f4d7c8-1cca-4233-9cc5-dfa205f89c49"). InnerVolumeSpecName "kube-api-access-5fmpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.610440 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "04f4d7c8-1cca-4233-9cc5-dfa205f89c49" (UID: "04f4d7c8-1cca-4233-9cc5-dfa205f89c49"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.617248 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "04f4d7c8-1cca-4233-9cc5-dfa205f89c49" (UID: "04f4d7c8-1cca-4233-9cc5-dfa205f89c49"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.618301 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory" (OuterVolumeSpecName: "inventory") pod "04f4d7c8-1cca-4233-9cc5-dfa205f89c49" (UID: "04f4d7c8-1cca-4233-9cc5-dfa205f89c49"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.687276 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.687322 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fmpq\" (UniqueName: \"kubernetes.io/projected/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-kube-api-access-5fmpq\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.687333 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.687343 4672 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.687352 4672 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/04f4d7c8-1cca-4233-9cc5-dfa205f89c49-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.987581 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" event={"ID":"04f4d7c8-1cca-4233-9cc5-dfa205f89c49","Type":"ContainerDied","Data":"5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b"} Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.987629 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5798636fcfa0050dfa538c3d3f593372e3ddd16d7657540dc938078c53bd2f6b" Oct 07 15:24:47 crc kubenswrapper[4672]: I1007 15:24:47.987645 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-lngw9" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.158739 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7"] Oct 07 15:24:48 crc kubenswrapper[4672]: E1007 15:24:48.159216 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04f4d7c8-1cca-4233-9cc5-dfa205f89c49" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159240 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="04f4d7c8-1cca-4233-9cc5-dfa205f89c49" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 15:24:48 crc kubenswrapper[4672]: E1007 15:24:48.159271 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="extract-content" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159280 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="extract-content" Oct 07 15:24:48 crc kubenswrapper[4672]: E1007 15:24:48.159294 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="extract-utilities" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159302 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="extract-utilities" Oct 07 15:24:48 crc kubenswrapper[4672]: E1007 15:24:48.159322 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="registry-server" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159332 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="registry-server" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159560 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="04f4d7c8-1cca-4233-9cc5-dfa205f89c49" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.159594 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="927e330d-8a2c-44c2-ac03-b5fa90ca3bda" containerName="registry-server" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.160434 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.163587 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.164515 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.164653 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.164768 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.164975 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.165519 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.178621 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7"] Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298362 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pff26\" (UniqueName: \"kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298447 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298598 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298721 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298780 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.298806 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400010 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400089 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400196 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pff26\" (UniqueName: \"kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400266 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400302 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.400328 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.405744 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.411822 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.412003 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.412003 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.412284 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.420868 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pff26\" (UniqueName: \"kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:48 crc kubenswrapper[4672]: I1007 15:24:48.481233 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:24:49 crc kubenswrapper[4672]: I1007 15:24:49.026445 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7"] Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.006328 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" event={"ID":"ebc0c220-98a0-4285-89ec-689749e5f16b","Type":"ContainerStarted","Data":"e6859b51531bad269fb33bd28efbc0ef79ceb93e7265fa98b57e24aa1862d268"} Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.006888 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" event={"ID":"ebc0c220-98a0-4285-89ec-689749e5f16b","Type":"ContainerStarted","Data":"e2add7c8a2d2f8b0457bf43817ce437f31fce2ec7dfd5ce0813336691e12e76c"} Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.033532 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" podStartSLOduration=1.570149454 podStartE2EDuration="2.033507471s" podCreationTimestamp="2025-10-07 15:24:48 +0000 UTC" firstStartedPulling="2025-10-07 15:24:49.032769708 +0000 UTC m=+2166.007948289" lastFinishedPulling="2025-10-07 15:24:49.496127725 +0000 UTC m=+2166.471306306" observedRunningTime="2025-10-07 15:24:50.021888182 +0000 UTC m=+2166.997066763" watchObservedRunningTime="2025-10-07 15:24:50.033507471 +0000 UTC m=+2167.008686052" Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.791664 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.793577 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.807004 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.957870 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8f8w\" (UniqueName: \"kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.957950 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:50 crc kubenswrapper[4672]: I1007 15:24:50.958417 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.060244 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8f8w\" (UniqueName: \"kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.060327 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.060481 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.061053 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.061636 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.088908 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n8f8w\" (UniqueName: \"kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w\") pod \"certified-operators-h72j6\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.110876 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:24:51 crc kubenswrapper[4672]: I1007 15:24:51.626984 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:24:51 crc kubenswrapper[4672]: W1007 15:24:51.628165 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0dc4b4e2_f75c_41fb_8cdd_6752606a9de9.slice/crio-1449ea88d6a637388613d5835127ba17b8427366095feaf73019228d04a0af48 WatchSource:0}: Error finding container 1449ea88d6a637388613d5835127ba17b8427366095feaf73019228d04a0af48: Status 404 returned error can't find the container with id 1449ea88d6a637388613d5835127ba17b8427366095feaf73019228d04a0af48 Oct 07 15:24:52 crc kubenswrapper[4672]: I1007 15:24:52.025975 4672 generic.go:334] "Generic (PLEG): container finished" podID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerID="d7b7a9bfe8234a6455683ba759b92f7936cdb66d1db7ad1fbd0897e6da531b84" exitCode=0 Oct 07 15:24:52 crc kubenswrapper[4672]: I1007 15:24:52.026195 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerDied","Data":"d7b7a9bfe8234a6455683ba759b92f7936cdb66d1db7ad1fbd0897e6da531b84"} Oct 07 15:24:52 crc kubenswrapper[4672]: I1007 15:24:52.026220 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerStarted","Data":"1449ea88d6a637388613d5835127ba17b8427366095feaf73019228d04a0af48"} Oct 07 15:24:54 crc kubenswrapper[4672]: I1007 15:24:54.044260 4672 generic.go:334] "Generic (PLEG): container finished" podID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerID="cc5b3b31846e902430aa944437080eda59e9c04eed036aff68f823fc556049dd" exitCode=0 Oct 07 15:24:54 crc kubenswrapper[4672]: I1007 15:24:54.044486 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerDied","Data":"cc5b3b31846e902430aa944437080eda59e9c04eed036aff68f823fc556049dd"} Oct 07 15:24:55 crc kubenswrapper[4672]: I1007 15:24:55.053465 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerStarted","Data":"6bc531e7968a1479989e031918dec9ddd7e16a9ac35fdd54cbc659669240ce45"} Oct 07 15:24:55 crc kubenswrapper[4672]: I1007 15:24:55.069188 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h72j6" podStartSLOduration=2.639178007 podStartE2EDuration="5.069169193s" podCreationTimestamp="2025-10-07 15:24:50 +0000 UTC" firstStartedPulling="2025-10-07 15:24:52.028749572 +0000 UTC m=+2169.003928154" lastFinishedPulling="2025-10-07 15:24:54.458740769 +0000 UTC m=+2171.433919340" observedRunningTime="2025-10-07 15:24:55.06871575 +0000 UTC 
m=+2172.043894331" watchObservedRunningTime="2025-10-07 15:24:55.069169193 +0000 UTC m=+2172.044347774" Oct 07 15:24:56 crc kubenswrapper[4672]: I1007 15:24:56.650057 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:24:56 crc kubenswrapper[4672]: I1007 15:24:56.650394 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:25:01 crc kubenswrapper[4672]: I1007 15:25:01.111052 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:01 crc kubenswrapper[4672]: I1007 15:25:01.111563 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:01 crc kubenswrapper[4672]: I1007 15:25:01.154788 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:02 crc kubenswrapper[4672]: I1007 15:25:02.171288 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:02 crc kubenswrapper[4672]: I1007 15:25:02.225663 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:25:04 crc kubenswrapper[4672]: I1007 15:25:04.124492 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h72j6" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="registry-server" containerID="cri-o://6bc531e7968a1479989e031918dec9ddd7e16a9ac35fdd54cbc659669240ce45" gracePeriod=2 Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.135473 4672 generic.go:334] "Generic (PLEG): container finished" podID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerID="6bc531e7968a1479989e031918dec9ddd7e16a9ac35fdd54cbc659669240ce45" exitCode=0 Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.135522 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerDied","Data":"6bc531e7968a1479989e031918dec9ddd7e16a9ac35fdd54cbc659669240ce45"} Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.649980 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.727064 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content\") pod \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.727194 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8f8w\" (UniqueName: \"kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w\") pod \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.727222 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities\") pod \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\" (UID: \"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9\") " Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.728023 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities" (OuterVolumeSpecName: "utilities") pod "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" (UID: "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.732821 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w" (OuterVolumeSpecName: "kube-api-access-n8f8w") pod "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" (UID: "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9"). InnerVolumeSpecName "kube-api-access-n8f8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.775742 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" (UID: "0dc4b4e2-f75c-41fb-8cdd-6752606a9de9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.828928 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8f8w\" (UniqueName: \"kubernetes.io/projected/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-kube-api-access-n8f8w\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.828971 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:05 crc kubenswrapper[4672]: I1007 15:25:05.828986 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.157601 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h72j6" event={"ID":"0dc4b4e2-f75c-41fb-8cdd-6752606a9de9","Type":"ContainerDied","Data":"1449ea88d6a637388613d5835127ba17b8427366095feaf73019228d04a0af48"} Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.157663 4672 scope.go:117] "RemoveContainer" containerID="6bc531e7968a1479989e031918dec9ddd7e16a9ac35fdd54cbc659669240ce45" Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.157881 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h72j6" Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.180133 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.184133 4672 scope.go:117] "RemoveContainer" containerID="cc5b3b31846e902430aa944437080eda59e9c04eed036aff68f823fc556049dd" Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.186808 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h72j6"] Oct 07 15:25:06 crc kubenswrapper[4672]: I1007 15:25:06.208823 4672 scope.go:117] "RemoveContainer" containerID="d7b7a9bfe8234a6455683ba759b92f7936cdb66d1db7ad1fbd0897e6da531b84" Oct 07 15:25:07 crc kubenswrapper[4672]: I1007 15:25:07.902311 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" path="/var/lib/kubelet/pods/0dc4b4e2-f75c-41fb-8cdd-6752606a9de9/volumes" Oct 07 15:25:26 crc kubenswrapper[4672]: I1007 15:25:26.650875 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:25:26 crc kubenswrapper[4672]: I1007 15:25:26.651419 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:25:36 crc kubenswrapper[4672]: I1007 15:25:36.453682 4672 generic.go:334] "Generic (PLEG): container finished" podID="ebc0c220-98a0-4285-89ec-689749e5f16b" containerID="e6859b51531bad269fb33bd28efbc0ef79ceb93e7265fa98b57e24aa1862d268" exitCode=0 Oct 07 15:25:36 crc 
kubenswrapper[4672]: I1007 15:25:36.453777 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" event={"ID":"ebc0c220-98a0-4285-89ec-689749e5f16b","Type":"ContainerDied","Data":"e6859b51531bad269fb33bd28efbc0ef79ceb93e7265fa98b57e24aa1862d268"} Oct 07 15:25:37 crc kubenswrapper[4672]: I1007 15:25:37.851178 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021187 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021356 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021409 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021443 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021524 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.021550 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pff26\" (UniqueName: \"kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26\") pod \"ebc0c220-98a0-4285-89ec-689749e5f16b\" (UID: \"ebc0c220-98a0-4285-89ec-689749e5f16b\") " Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.030209 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.038586 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26" (OuterVolumeSpecName: "kube-api-access-pff26") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "kube-api-access-pff26". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.052795 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.053253 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.053930 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.058465 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory" (OuterVolumeSpecName: "inventory") pod "ebc0c220-98a0-4285-89ec-689749e5f16b" (UID: "ebc0c220-98a0-4285-89ec-689749e5f16b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123281 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123329 4672 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123345 4672 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123359 4672 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123374 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pff26\" (UniqueName: \"kubernetes.io/projected/ebc0c220-98a0-4285-89ec-689749e5f16b-kube-api-access-pff26\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.123385 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebc0c220-98a0-4285-89ec-689749e5f16b-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.471505 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" event={"ID":"ebc0c220-98a0-4285-89ec-689749e5f16b","Type":"ContainerDied","Data":"e2add7c8a2d2f8b0457bf43817ce437f31fce2ec7dfd5ce0813336691e12e76c"} Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.471813 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2add7c8a2d2f8b0457bf43817ce437f31fce2ec7dfd5ce0813336691e12e76c" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.471563 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.570923 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd"] Oct 07 15:25:38 crc kubenswrapper[4672]: E1007 15:25:38.571352 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="registry-server" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571394 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="registry-server" Oct 07 15:25:38 crc kubenswrapper[4672]: E1007 15:25:38.571411 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="extract-utilities" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571418 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="extract-utilities" Oct 07 15:25:38 crc kubenswrapper[4672]: E1007 15:25:38.571439 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="extract-content" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571445 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="extract-content" Oct 07 15:25:38 crc kubenswrapper[4672]: E1007 15:25:38.571472 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebc0c220-98a0-4285-89ec-689749e5f16b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571479 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebc0c220-98a0-4285-89ec-689749e5f16b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571676 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc4b4e2-f75c-41fb-8cdd-6752606a9de9" containerName="registry-server" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.571701 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebc0c220-98a0-4285-89ec-689749e5f16b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.572388 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.575004 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.575254 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.575447 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.575621 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.575827 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.583826 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd"] Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.735582 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.735728 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.735758 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.735812 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.735837 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkrbd\" (UniqueName: \"kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.837365 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.837447 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.837468 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.837511 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.837532 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkrbd\" (UniqueName: \"kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.841806 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.842260 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.846651 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.852301 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.853767 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkrbd\" (UniqueName: \"kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-wstfd\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:38 crc kubenswrapper[4672]: I1007 15:25:38.892068 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" Oct 07 15:25:39 crc kubenswrapper[4672]: I1007 15:25:39.408950 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd"] Oct 07 15:25:39 crc kubenswrapper[4672]: I1007 15:25:39.481371 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" event={"ID":"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841","Type":"ContainerStarted","Data":"726abc7325d5f71dc9bece5efb3b7666afa80df90242fa30bb0f7e7d93719e57"} Oct 07 15:25:41 crc kubenswrapper[4672]: I1007 15:25:41.501695 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" event={"ID":"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841","Type":"ContainerStarted","Data":"57bb579f21d182396fbbe72a1ebd7aa51df3ac8fe201ab9936a7ae3ec0987f1d"} Oct 07 15:25:41 crc kubenswrapper[4672]: I1007 15:25:41.518888 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" podStartSLOduration=2.551431779 podStartE2EDuration="3.51886857s" podCreationTimestamp="2025-10-07 15:25:38 +0000 UTC" firstStartedPulling="2025-10-07 15:25:39.417111462 +0000 UTC m=+2216.392290043" lastFinishedPulling="2025-10-07 15:25:40.384548253 +0000 UTC m=+2217.359726834" observedRunningTime="2025-10-07 15:25:41.516933204 +0000 UTC m=+2218.492111785" watchObservedRunningTime="2025-10-07 15:25:41.51886857 +0000 UTC m=+2218.494047151" Oct 07 15:25:56 crc kubenswrapper[4672]: I1007 15:25:56.650454 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:25:56 crc kubenswrapper[4672]: I1007 15:25:56.651046 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:25:56 crc kubenswrapper[4672]: I1007 15:25:56.651124 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:25:56 crc kubenswrapper[4672]: I1007 15:25:56.652118 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"} 
pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:25:56 crc kubenswrapper[4672]: I1007 15:25:56.652174 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" gracePeriod=600 Oct 07 15:25:56 crc kubenswrapper[4672]: E1007 15:25:56.779319 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:25:57 crc kubenswrapper[4672]: I1007 15:25:57.642972 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" exitCode=0 Oct 07 15:25:57 crc kubenswrapper[4672]: I1007 15:25:57.643053 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"} Oct 07 15:25:57 crc kubenswrapper[4672]: I1007 15:25:57.643115 4672 scope.go:117] "RemoveContainer" containerID="1f0049b9a568b508db3acc2c4e9d42f1ee71b196cad6504e841571287e2104a4" Oct 07 15:25:57 crc kubenswrapper[4672]: I1007 15:25:57.643877 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:25:57 crc kubenswrapper[4672]: E1007 15:25:57.644307 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:26:12 crc kubenswrapper[4672]: I1007 15:26:12.894415 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:26:12 crc kubenswrapper[4672]: E1007 15:26:12.895574 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:26:23 crc kubenswrapper[4672]: I1007 15:26:23.898087 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:26:23 crc kubenswrapper[4672]: E1007 15:26:23.898972 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:26:36 crc kubenswrapper[4672]: I1007 15:26:36.892194 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:26:36 crc kubenswrapper[4672]: E1007 15:26:36.893042 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:26:49 crc kubenswrapper[4672]: I1007 15:26:49.892441 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:26:49 crc kubenswrapper[4672]: E1007 15:26:49.893240 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:27:00 crc kubenswrapper[4672]: I1007 15:27:00.892816 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:27:00 crc kubenswrapper[4672]: E1007 15:27:00.893627 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:27:15 crc kubenswrapper[4672]: I1007 15:27:15.892673 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:27:15 crc kubenswrapper[4672]: E1007 15:27:15.893687 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.488011 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.499682 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.520180 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.629691 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.629968 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.630050 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxwq7\" (UniqueName: \"kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.731537 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.731999 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.732050 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxwq7\" (UniqueName: \"kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.732082 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.732363 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.752786 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vxwq7\" (UniqueName: \"kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7\") pod \"community-operators-kdgjt\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:24 crc kubenswrapper[4672]: I1007 15:27:24.857449 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:25 crc kubenswrapper[4672]: I1007 15:27:25.163444 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:25 crc kubenswrapper[4672]: I1007 15:27:25.462977 4672 generic.go:334] "Generic (PLEG): container finished" podID="7039e5bd-977f-4245-80d5-b170e303fff0" containerID="e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0" exitCode=0 Oct 07 15:27:25 crc kubenswrapper[4672]: I1007 15:27:25.463068 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerDied","Data":"e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0"} Oct 07 15:27:25 crc kubenswrapper[4672]: I1007 15:27:25.463428 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerStarted","Data":"9dace903b39116544f7b52c8fdcdafb6e7882ddb79791ee1a4e0d5b49432d613"} Oct 07 15:27:27 crc kubenswrapper[4672]: I1007 15:27:27.482652 4672 generic.go:334] "Generic (PLEG): container finished" podID="7039e5bd-977f-4245-80d5-b170e303fff0" containerID="447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420" exitCode=0 Oct 07 15:27:27 crc kubenswrapper[4672]: I1007 15:27:27.482732 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerDied","Data":"447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420"} Oct 07 15:27:28 crc kubenswrapper[4672]: I1007 15:27:28.495505 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerStarted","Data":"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f"} Oct 07 15:27:28 crc kubenswrapper[4672]: I1007 15:27:28.521858 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kdgjt" podStartSLOduration=2.137473601 podStartE2EDuration="4.521838546s" podCreationTimestamp="2025-10-07 15:27:24 +0000 UTC" firstStartedPulling="2025-10-07 15:27:25.46679333 +0000 UTC m=+2322.441971911" lastFinishedPulling="2025-10-07 15:27:27.851158275 +0000 UTC m=+2324.826336856" observedRunningTime="2025-10-07 15:27:28.514323267 +0000 UTC m=+2325.489501878" watchObservedRunningTime="2025-10-07 15:27:28.521838546 +0000 UTC m=+2325.497017127" Oct 07 15:27:29 crc kubenswrapper[4672]: I1007 15:27:29.892989 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:27:29 crc kubenswrapper[4672]: E1007 15:27:29.893927 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:27:34 crc kubenswrapper[4672]: I1007 15:27:34.857704 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:34 crc kubenswrapper[4672]: I1007 15:27:34.858097 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:34 crc kubenswrapper[4672]: I1007 15:27:34.900253 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:35 crc kubenswrapper[4672]: I1007 15:27:35.604067 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:35 crc kubenswrapper[4672]: I1007 15:27:35.647363 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:37 crc kubenswrapper[4672]: I1007 15:27:37.575215 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kdgjt" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="registry-server" containerID="cri-o://3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f" gracePeriod=2 Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.547923 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.597869 4672 generic.go:334] "Generic (PLEG): container finished" podID="7039e5bd-977f-4245-80d5-b170e303fff0" containerID="3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f" exitCode=0 Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.597928 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerDied","Data":"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f"} Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.597958 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdgjt" event={"ID":"7039e5bd-977f-4245-80d5-b170e303fff0","Type":"ContainerDied","Data":"9dace903b39116544f7b52c8fdcdafb6e7882ddb79791ee1a4e0d5b49432d613"} Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.597957 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kdgjt" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.597973 4672 scope.go:117] "RemoveContainer" containerID="3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.617422 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content\") pod \"7039e5bd-977f-4245-80d5-b170e303fff0\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.617519 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities\") pod \"7039e5bd-977f-4245-80d5-b170e303fff0\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.617735 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxwq7\" (UniqueName: \"kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7\") pod \"7039e5bd-977f-4245-80d5-b170e303fff0\" (UID: \"7039e5bd-977f-4245-80d5-b170e303fff0\") " Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.618331 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities" (OuterVolumeSpecName: "utilities") pod "7039e5bd-977f-4245-80d5-b170e303fff0" (UID: "7039e5bd-977f-4245-80d5-b170e303fff0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.624009 4672 scope.go:117] "RemoveContainer" containerID="447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.625047 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7" (OuterVolumeSpecName: "kube-api-access-vxwq7") pod "7039e5bd-977f-4245-80d5-b170e303fff0" (UID: "7039e5bd-977f-4245-80d5-b170e303fff0"). InnerVolumeSpecName "kube-api-access-vxwq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.671641 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7039e5bd-977f-4245-80d5-b170e303fff0" (UID: "7039e5bd-977f-4245-80d5-b170e303fff0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.686686 4672 scope.go:117] "RemoveContainer" containerID="e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.719878 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.719930 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7039e5bd-977f-4245-80d5-b170e303fff0-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.719946 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxwq7\" (UniqueName: \"kubernetes.io/projected/7039e5bd-977f-4245-80d5-b170e303fff0-kube-api-access-vxwq7\") on node \"crc\" DevicePath \"\"" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.730110 4672 scope.go:117] "RemoveContainer" containerID="3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f" Oct 07 15:27:39 crc kubenswrapper[4672]: E1007 15:27:39.731917 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f\": container with ID starting with 3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f not found: ID does not exist" containerID="3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.731973 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f"} err="failed to get container status \"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f\": rpc error: code = NotFound desc = could not find container \"3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f\": container with ID starting with 3c4b33dcafdb00371e4650bef43081c5c81afc6208dae148bba182cb3fc5cd2f not found: ID does not exist" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.732006 4672 scope.go:117] "RemoveContainer" containerID="447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420" Oct 07 15:27:39 crc kubenswrapper[4672]: E1007 15:27:39.733988 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420\": container with ID starting with 447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420 not found: ID does not exist" containerID="447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.734031 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420"} err="failed to get container status \"447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420\": rpc error: code = NotFound desc = could not find container \"447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420\": container with ID starting with 447c9ec82290c84e142e11d54207624769bbdf167ac1ef5137337be61b840420 not found: ID does not exist" Oct 07 15:27:39 crc 
kubenswrapper[4672]: I1007 15:27:39.734051 4672 scope.go:117] "RemoveContainer" containerID="e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0" Oct 07 15:27:39 crc kubenswrapper[4672]: E1007 15:27:39.735136 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0\": container with ID starting with e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0 not found: ID does not exist" containerID="e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.735161 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0"} err="failed to get container status \"e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0\": rpc error: code = NotFound desc = could not find container \"e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0\": container with ID starting with e9c1570d4281e2d367040508e96cf7d041b05ca531d9eeee0371da1b5203a4e0 not found: ID does not exist" Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.938372 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:39 crc kubenswrapper[4672]: I1007 15:27:39.951379 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kdgjt"] Oct 07 15:27:41 crc kubenswrapper[4672]: I1007 15:27:41.903190 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" path="/var/lib/kubelet/pods/7039e5bd-977f-4245-80d5-b170e303fff0/volumes" Oct 07 15:27:43 crc kubenswrapper[4672]: I1007 15:27:43.899537 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:27:43 crc kubenswrapper[4672]: E1007 15:27:43.900296 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:27:57 crc kubenswrapper[4672]: I1007 15:27:57.891669 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:27:57 crc kubenswrapper[4672]: E1007 15:27:57.892740 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:28:08 crc kubenswrapper[4672]: I1007 15:28:08.892544 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:28:08 crc kubenswrapper[4672]: E1007 15:28:08.893537 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
Oct 07 15:28:22 crc kubenswrapper[4672]: I1007 15:28:22.892273 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:28:22 crc kubenswrapper[4672]: E1007 15:28:22.893343 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:28:35 crc kubenswrapper[4672]: I1007 15:28:35.892095 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:28:35 crc kubenswrapper[4672]: E1007 15:28:35.893281 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:28:46 crc kubenswrapper[4672]: I1007 15:28:46.892558 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:28:46 crc kubenswrapper[4672]: E1007 15:28:46.893509 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:28:57 crc kubenswrapper[4672]: I1007 15:28:57.891581 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:28:57 crc kubenswrapper[4672]: E1007 15:28:57.892735 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:29:09 crc kubenswrapper[4672]: I1007 15:29:09.892152 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:29:09 crc kubenswrapper[4672]: E1007 15:29:09.893204 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:29:24 crc kubenswrapper[4672]: I1007 15:29:24.891735 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:29:24 crc kubenswrapper[4672]: E1007 15:29:24.892930 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:29:38 crc kubenswrapper[4672]: I1007 15:29:38.891697 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:29:38 crc kubenswrapper[4672]: E1007 15:29:38.892836 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:29:52 crc kubenswrapper[4672]: I1007 15:29:52.892673 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1"
Oct 07 15:29:52 crc kubenswrapper[4672]: E1007 15:29:52.893920 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c"
Oct 07 15:29:53 crc kubenswrapper[4672]: I1007 15:29:53.778773 4672 generic.go:334] "Generic (PLEG): container finished" podID="0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" containerID="57bb579f21d182396fbbe72a1ebd7aa51df3ac8fe201ab9936a7ae3ec0987f1d" exitCode=0
Oct 07 15:29:53 crc kubenswrapper[4672]: I1007 15:29:53.778832 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" event={"ID":"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841","Type":"ContainerDied","Data":"57bb579f21d182396fbbe72a1ebd7aa51df3ac8fe201ab9936a7ae3ec0987f1d"}
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.187577 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.295355 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory\") pod \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") "
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.295638 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle\") pod \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") "
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.295808 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkrbd\" (UniqueName: \"kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd\") pod \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") "
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.295980 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0\") pod \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") "
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.296291 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key\") pod \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\" (UID: \"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841\") "
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.301782 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd" (OuterVolumeSpecName: "kube-api-access-tkrbd") pod "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" (UID: "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841"). InnerVolumeSpecName "kube-api-access-tkrbd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.304331 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" (UID: "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.324234 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory" (OuterVolumeSpecName: "inventory") pod "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" (UID: "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.324678 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" (UID: "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.328484 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" (UID: "0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.400188 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-inventory\") on node \"crc\" DevicePath \"\""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.400247 4672 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.400269 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkrbd\" (UniqueName: \"kubernetes.io/projected/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-kube-api-access-tkrbd\") on node \"crc\" DevicePath \"\""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.400286 4672 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.400305 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.795958 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd" event={"ID":"0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841","Type":"ContainerDied","Data":"726abc7325d5f71dc9bece5efb3b7666afa80df90242fa30bb0f7e7d93719e57"}
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.796449 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="726abc7325d5f71dc9bece5efb3b7666afa80df90242fa30bb0f7e7d93719e57"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.795988 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-wstfd"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902183 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"]
Oct 07 15:29:55 crc kubenswrapper[4672]: E1007 15:29:55.902463 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="extract-utilities"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902475 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="extract-utilities"
Oct 07 15:29:55 crc kubenswrapper[4672]: E1007 15:29:55.902489 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="extract-content"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902496 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="extract-content"
Oct 07 15:29:55 crc kubenswrapper[4672]: E1007 15:29:55.902507 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="registry-server"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902515 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="registry-server"
Oct 07 15:29:55 crc kubenswrapper[4672]: E1007 15:29:55.902536 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902543 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902736 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7039e5bd-977f-4245-80d5-b170e303fff0" containerName="registry-server"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.902750 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.903338 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"]
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.903445 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.906738 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.907052 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.907155 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.907243 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.907422 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.907675 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 07 15:29:55 crc kubenswrapper[4672]: I1007 15:29:55.913756 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010439 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010504 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010533 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010623 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010684 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"
" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010711 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.010738 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmcw7\" (UniqueName: \"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.011164 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.011392 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113513 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113611 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113652 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113677 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113718 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113763 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113789 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113817 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmcw7\" (UniqueName: \"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.113903 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.116062 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.117722 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.118155 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.118515 
4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.119276 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.122165 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.126126 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.127950 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.133860 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmcw7\" (UniqueName: \"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2s7mn\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.229718 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.749865 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn"] Oct 07 15:29:56 crc kubenswrapper[4672]: W1007 15:29:56.753891 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe41884e_d5f3_42e7_bd95_3c04629e26e3.slice/crio-4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c WatchSource:0}: Error finding container 4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c: Status 404 returned error can't find the container with id 4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.756496 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:29:56 crc kubenswrapper[4672]: I1007 15:29:56.806327 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" event={"ID":"be41884e-d5f3-42e7-bd95-3c04629e26e3","Type":"ContainerStarted","Data":"4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c"} Oct 07 15:29:57 crc kubenswrapper[4672]: I1007 15:29:57.820714 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" event={"ID":"be41884e-d5f3-42e7-bd95-3c04629e26e3","Type":"ContainerStarted","Data":"f0005d39538435e36aa393096a2aa7e0173b7d1a15701acd08f88fceefc7b605"} Oct 07 15:29:57 crc kubenswrapper[4672]: I1007 15:29:57.843906 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" podStartSLOduration=2.180899065 podStartE2EDuration="2.84388s" podCreationTimestamp="2025-10-07 15:29:55 +0000 UTC" firstStartedPulling="2025-10-07 15:29:56.756246615 +0000 UTC m=+2473.731425196" lastFinishedPulling="2025-10-07 15:29:57.41922755 +0000 UTC m=+2474.394406131" observedRunningTime="2025-10-07 15:29:57.837906276 +0000 UTC m=+2474.813084877" watchObservedRunningTime="2025-10-07 15:29:57.84388 +0000 UTC m=+2474.819058581" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.166402 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc"] Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.169485 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.178377 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.178387 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.182947 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc"] Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.295302 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.295826 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.296095 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsc67\" (UniqueName: \"kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.398324 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.398479 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsc67\" (UniqueName: \"kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.398993 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.399376 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume\") pod 
\"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.412848 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.415757 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsc67\" (UniqueName: \"kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67\") pod \"collect-profiles-29330850-2q9cc\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.510700 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:00 crc kubenswrapper[4672]: I1007 15:30:00.970703 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc"] Oct 07 15:30:00 crc kubenswrapper[4672]: W1007 15:30:00.974561 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39920f5a_a280_412f_a27e_23a7c0bb83e1.slice/crio-8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5 WatchSource:0}: Error finding container 8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5: Status 404 returned error can't find the container with id 8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5 Oct 07 15:30:01 crc kubenswrapper[4672]: I1007 15:30:01.860713 4672 generic.go:334] "Generic (PLEG): container finished" podID="39920f5a-a280-412f-a27e-23a7c0bb83e1" containerID="5fe9ac8b7bd4738ed39dcdb6221d48d52a90d775c248777db238f73afd909567" exitCode=0 Oct 07 15:30:01 crc kubenswrapper[4672]: I1007 15:30:01.860820 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" event={"ID":"39920f5a-a280-412f-a27e-23a7c0bb83e1","Type":"ContainerDied","Data":"5fe9ac8b7bd4738ed39dcdb6221d48d52a90d775c248777db238f73afd909567"} Oct 07 15:30:01 crc kubenswrapper[4672]: I1007 15:30:01.861218 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" event={"ID":"39920f5a-a280-412f-a27e-23a7c0bb83e1","Type":"ContainerStarted","Data":"8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5"} Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.197240 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.358921 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsc67\" (UniqueName: \"kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67\") pod \"39920f5a-a280-412f-a27e-23a7c0bb83e1\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.359468 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume\") pod \"39920f5a-a280-412f-a27e-23a7c0bb83e1\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.359621 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume\") pod \"39920f5a-a280-412f-a27e-23a7c0bb83e1\" (UID: \"39920f5a-a280-412f-a27e-23a7c0bb83e1\") " Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.360567 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume" (OuterVolumeSpecName: "config-volume") pod "39920f5a-a280-412f-a27e-23a7c0bb83e1" (UID: "39920f5a-a280-412f-a27e-23a7c0bb83e1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.365436 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67" (OuterVolumeSpecName: "kube-api-access-qsc67") pod "39920f5a-a280-412f-a27e-23a7c0bb83e1" (UID: "39920f5a-a280-412f-a27e-23a7c0bb83e1"). InnerVolumeSpecName "kube-api-access-qsc67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.365772 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "39920f5a-a280-412f-a27e-23a7c0bb83e1" (UID: "39920f5a-a280-412f-a27e-23a7c0bb83e1"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.461607 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39920f5a-a280-412f-a27e-23a7c0bb83e1-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.461645 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39920f5a-a280-412f-a27e-23a7c0bb83e1-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.461656 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsc67\" (UniqueName: \"kubernetes.io/projected/39920f5a-a280-412f-a27e-23a7c0bb83e1-kube-api-access-qsc67\") on node \"crc\" DevicePath \"\"" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.878816 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" event={"ID":"39920f5a-a280-412f-a27e-23a7c0bb83e1","Type":"ContainerDied","Data":"8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5"} Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.878862 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fff7a3f80609ed1dc10717d4ecf67eb65df7ee9adc7c775d0b0b822897a98b5" Oct 07 15:30:03 crc kubenswrapper[4672]: I1007 15:30:03.878906 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330850-2q9cc" Oct 07 15:30:04 crc kubenswrapper[4672]: I1007 15:30:04.271081 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"] Oct 07 15:30:04 crc kubenswrapper[4672]: I1007 15:30:04.281494 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330805-2bgnf"] Oct 07 15:30:05 crc kubenswrapper[4672]: I1007 15:30:05.893737 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:30:05 crc kubenswrapper[4672]: E1007 15:30:05.894373 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:30:05 crc kubenswrapper[4672]: I1007 15:30:05.902158 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3d69d29-e875-49dd-8929-40ac7f00470d" path="/var/lib/kubelet/pods/c3d69d29-e875-49dd-8929-40ac7f00470d/volumes" Oct 07 15:30:16 crc kubenswrapper[4672]: I1007 15:30:16.892073 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:30:16 crc kubenswrapper[4672]: E1007 15:30:16.892812 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:30:31 crc kubenswrapper[4672]: I1007 15:30:31.892637 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:30:31 crc kubenswrapper[4672]: E1007 15:30:31.894307 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:30:44 crc kubenswrapper[4672]: I1007 15:30:44.893127 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:30:44 crc kubenswrapper[4672]: E1007 15:30:44.894290 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:30:55 crc kubenswrapper[4672]: I1007 15:30:55.122464 4672 scope.go:117] "RemoveContainer" containerID="e41e74cb737b47e5643c30976f9543b4bea362733b6e8e52089a9dd49308b4ba" Oct 07 15:30:58 crc kubenswrapper[4672]: I1007 15:30:58.891896 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:30:59 crc kubenswrapper[4672]: I1007 15:30:59.351933 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77"} Oct 07 15:32:28 crc kubenswrapper[4672]: I1007 15:32:28.404044 4672 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6d96d8478f-sx526" podUID="e7406b25-bb39-409b-bde4-75cc32bf4ae2" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Oct 07 15:33:09 crc kubenswrapper[4672]: I1007 15:33:09.494300 4672 generic.go:334] "Generic (PLEG): container finished" podID="be41884e-d5f3-42e7-bd95-3c04629e26e3" containerID="f0005d39538435e36aa393096a2aa7e0173b7d1a15701acd08f88fceefc7b605" exitCode=0 Oct 07 15:33:09 crc kubenswrapper[4672]: I1007 15:33:09.494387 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" event={"ID":"be41884e-d5f3-42e7-bd95-3c04629e26e3","Type":"ContainerDied","Data":"f0005d39538435e36aa393096a2aa7e0173b7d1a15701acd08f88fceefc7b605"} Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.882185 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.986693 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987054 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987078 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987161 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987184 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987344 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmcw7\" (UniqueName: \"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987369 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987478 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.987522 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0\") pod \"be41884e-d5f3-42e7-bd95-3c04629e26e3\" (UID: \"be41884e-d5f3-42e7-bd95-3c04629e26e3\") " Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.997817 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7" (OuterVolumeSpecName: "kube-api-access-xmcw7") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "kube-api-access-xmcw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:33:10 crc kubenswrapper[4672]: I1007 15:33:10.998281 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.013758 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.016705 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory" (OuterVolumeSpecName: "inventory") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.019614 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.019848 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.026842 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.028868 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.042943 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "be41884e-d5f3-42e7-bd95-3c04629e26e3" (UID: "be41884e-d5f3-42e7-bd95-3c04629e26e3"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090168 4672 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090204 4672 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090216 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmcw7\" (UniqueName: \"kubernetes.io/projected/be41884e-d5f3-42e7-bd95-3c04629e26e3-kube-api-access-xmcw7\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090228 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090243 4672 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090256 4672 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090266 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090278 4672 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.090288 4672 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be41884e-d5f3-42e7-bd95-3c04629e26e3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.511710 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" event={"ID":"be41884e-d5f3-42e7-bd95-3c04629e26e3","Type":"ContainerDied","Data":"4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c"} Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.511749 4672 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="4ddd2e0a924f4ccdf8994506a15bf10e1b6d454f1a877781681b9683efdd182c" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.511801 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2s7mn" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.620909 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2"] Oct 07 15:33:11 crc kubenswrapper[4672]: E1007 15:33:11.621653 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39920f5a-a280-412f-a27e-23a7c0bb83e1" containerName="collect-profiles" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.621698 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="39920f5a-a280-412f-a27e-23a7c0bb83e1" containerName="collect-profiles" Oct 07 15:33:11 crc kubenswrapper[4672]: E1007 15:33:11.621720 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be41884e-d5f3-42e7-bd95-3c04629e26e3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.621727 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="be41884e-d5f3-42e7-bd95-3c04629e26e3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.621970 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="be41884e-d5f3-42e7-bd95-3c04629e26e3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.622002 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="39920f5a-a280-412f-a27e-23a7c0bb83e1" containerName="collect-profiles" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.622709 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.626218 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.627110 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.627327 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.627499 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.640541 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2"] Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.643616 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mk64t" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699305 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699433 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7dzp\" (UniqueName: \"kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699495 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699529 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699566 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc 
kubenswrapper[4672]: I1007 15:33:11.699608 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.699648 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.801855 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.801961 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7dzp\" (UniqueName: \"kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.802072 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.802117 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.802149 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.802190 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: 
\"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.802235 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.808038 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.808208 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.808597 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.810600 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.813835 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.817452 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.819698 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7dzp\" (UniqueName: \"kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jktl2\" (UID: 
\"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:11 crc kubenswrapper[4672]: I1007 15:33:11.952064 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:33:12 crc kubenswrapper[4672]: I1007 15:33:12.507281 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2"] Oct 07 15:33:13 crc kubenswrapper[4672]: I1007 15:33:13.532828 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" event={"ID":"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc","Type":"ContainerStarted","Data":"619476f4c3c350a4ba1bc1afa3ab0c6296eea0744ca72d36b9e58d4b41ce8072"} Oct 07 15:33:14 crc kubenswrapper[4672]: I1007 15:33:14.542402 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" event={"ID":"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc","Type":"ContainerStarted","Data":"f2d5c00f738e17391894152014e53739b41f8b294d77622d943b4f64bbcbc40b"} Oct 07 15:33:14 crc kubenswrapper[4672]: I1007 15:33:14.564469 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" podStartSLOduration=2.555292226 podStartE2EDuration="3.564441914s" podCreationTimestamp="2025-10-07 15:33:11 +0000 UTC" firstStartedPulling="2025-10-07 15:33:12.518937824 +0000 UTC m=+2669.494116405" lastFinishedPulling="2025-10-07 15:33:13.528087522 +0000 UTC m=+2670.503266093" observedRunningTime="2025-10-07 15:33:14.557222814 +0000 UTC m=+2671.532401415" watchObservedRunningTime="2025-10-07 15:33:14.564441914 +0000 UTC m=+2671.539620495" Oct 07 15:33:26 crc kubenswrapper[4672]: I1007 15:33:26.650954 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:33:26 crc kubenswrapper[4672]: I1007 15:33:26.651396 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.100348 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.103552 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.130644 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.186302 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdhbf\" (UniqueName: \"kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.186460 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.186555 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.288092 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdhbf\" (UniqueName: \"kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.288175 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.288215 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.288661 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.288740 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.310539 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pdhbf\" (UniqueName: \"kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf\") pod \"redhat-operators-cnjn7\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.427875 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:33:46 crc kubenswrapper[4672]: I1007 15:33:46.884684 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:33:47 crc kubenswrapper[4672]: I1007 15:33:47.813210 4672 generic.go:334] "Generic (PLEG): container finished" podID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerID="fc24b2b4fd6de56b4e9f745416f1530ef078b7afc8527dfdd1929d4dc3e0770f" exitCode=0 Oct 07 15:33:47 crc kubenswrapper[4672]: I1007 15:33:47.813379 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerDied","Data":"fc24b2b4fd6de56b4e9f745416f1530ef078b7afc8527dfdd1929d4dc3e0770f"} Oct 07 15:33:47 crc kubenswrapper[4672]: I1007 15:33:47.813860 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerStarted","Data":"bc736a06ef0ea2a9d0c42d776dc796182f31a3c496460833aca5643ba7f6c0ac"} Oct 07 15:33:56 crc kubenswrapper[4672]: I1007 15:33:56.650131 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:33:56 crc kubenswrapper[4672]: I1007 15:33:56.650606 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:33:56 crc kubenswrapper[4672]: I1007 15:33:56.902895 4672 generic.go:334] "Generic (PLEG): container finished" podID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerID="4905334ff529958262599844b50b0f161be73f4908a31dd12f7143579d905471" exitCode=0 Oct 07 15:33:56 crc kubenswrapper[4672]: I1007 15:33:56.902950 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerDied","Data":"4905334ff529958262599844b50b0f161be73f4908a31dd12f7143579d905471"} Oct 07 15:34:08 crc kubenswrapper[4672]: I1007 15:34:08.002567 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerStarted","Data":"f8cc96227d14ef3db2b06f0b44a71e2fe3d737a74454c832b45b929f9ca97e15"} Oct 07 15:34:08 crc kubenswrapper[4672]: I1007 15:34:08.023676 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cnjn7" podStartSLOduration=2.963983586 podStartE2EDuration="22.023639505s" podCreationTimestamp="2025-10-07 15:33:46 +0000 UTC" firstStartedPulling="2025-10-07 15:33:47.815194482 +0000 UTC m=+2704.790373063" 
lastFinishedPulling="2025-10-07 15:34:06.874850391 +0000 UTC m=+2723.850028982" observedRunningTime="2025-10-07 15:34:08.019807344 +0000 UTC m=+2724.994985945" watchObservedRunningTime="2025-10-07 15:34:08.023639505 +0000 UTC m=+2724.998818086" Oct 07 15:34:16 crc kubenswrapper[4672]: I1007 15:34:16.429235 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:34:16 crc kubenswrapper[4672]: I1007 15:34:16.431488 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:34:16 crc kubenswrapper[4672]: I1007 15:34:16.480931 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.123748 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.201561 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.276859 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.277138 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kr5mt" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="registry-server" containerID="cri-o://fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4" gracePeriod=2 Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.762101 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.807972 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2j4p\" (UniqueName: \"kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p\") pod \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.808200 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content\") pod \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.808580 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities\") pod \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\" (UID: \"7b1bc29c-f45a-4070-8fbb-e58c56ce448d\") " Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.809301 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities" (OuterVolumeSpecName: "utilities") pod "7b1bc29c-f45a-4070-8fbb-e58c56ce448d" (UID: "7b1bc29c-f45a-4070-8fbb-e58c56ce448d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.816425 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p" (OuterVolumeSpecName: "kube-api-access-f2j4p") pod "7b1bc29c-f45a-4070-8fbb-e58c56ce448d" (UID: "7b1bc29c-f45a-4070-8fbb-e58c56ce448d"). InnerVolumeSpecName "kube-api-access-f2j4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.883950 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b1bc29c-f45a-4070-8fbb-e58c56ce448d" (UID: "7b1bc29c-f45a-4070-8fbb-e58c56ce448d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.912371 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.912412 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2j4p\" (UniqueName: \"kubernetes.io/projected/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-kube-api-access-f2j4p\") on node \"crc\" DevicePath \"\"" Oct 07 15:34:17 crc kubenswrapper[4672]: I1007 15:34:17.912426 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b1bc29c-f45a-4070-8fbb-e58c56ce448d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.093572 4672 generic.go:334] "Generic (PLEG): container finished" podID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerID="fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4" exitCode=0 Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.093673 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kr5mt" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.093687 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerDied","Data":"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4"} Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.093764 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr5mt" event={"ID":"7b1bc29c-f45a-4070-8fbb-e58c56ce448d","Type":"ContainerDied","Data":"557369d490f49814c01135a60ae0761db819fbc4b6f14b7a0af2e63c87a02dc6"} Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.093793 4672 scope.go:117] "RemoveContainer" containerID="fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.120372 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.132448 4672 scope.go:117] "RemoveContainer" containerID="dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.142195 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kr5mt"] Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.155189 4672 scope.go:117] "RemoveContainer" containerID="2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.209408 4672 scope.go:117] "RemoveContainer" containerID="fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4" Oct 07 15:34:18 crc kubenswrapper[4672]: E1007 15:34:18.210045 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4\": container with ID starting with fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4 not found: ID does not exist" containerID="fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.210079 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4"} err="failed to get container status \"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4\": rpc error: code = NotFound desc = could not find container \"fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4\": container with ID starting with fbfb63d58218750a6ae0dc4c23ff182924d5d494bef373bcc879ed3dbae043c4 not found: ID does not exist" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.210101 4672 scope.go:117] "RemoveContainer" containerID="dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b" Oct 07 15:34:18 crc kubenswrapper[4672]: E1007 15:34:18.210297 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b\": container with ID starting with dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b not found: ID does not exist" containerID="dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.210346 4672 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b"} err="failed to get container status \"dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b\": rpc error: code = NotFound desc = could not find container \"dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b\": container with ID starting with dcdecbde4d597073ac15866505781d7f73113be95d6069582aa4bfbc0dd6757b not found: ID does not exist" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.210360 4672 scope.go:117] "RemoveContainer" containerID="2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a" Oct 07 15:34:18 crc kubenswrapper[4672]: E1007 15:34:18.210578 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a\": container with ID starting with 2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a not found: ID does not exist" containerID="2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a" Oct 07 15:34:18 crc kubenswrapper[4672]: I1007 15:34:18.210603 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a"} err="failed to get container status \"2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a\": rpc error: code = NotFound desc = could not find container \"2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a\": container with ID starting with 2add173241a0f30fd52fef66a12af7c892620c96533bb0c1fa5955a3d72f5b3a not found: ID does not exist" Oct 07 15:34:19 crc kubenswrapper[4672]: I1007 15:34:19.904534 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" path="/var/lib/kubelet/pods/7b1bc29c-f45a-4070-8fbb-e58c56ce448d/volumes" Oct 07 15:34:26 crc kubenswrapper[4672]: I1007 15:34:26.651205 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:34:26 crc kubenswrapper[4672]: I1007 15:34:26.651843 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:34:26 crc kubenswrapper[4672]: I1007 15:34:26.651895 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:34:26 crc kubenswrapper[4672]: I1007 15:34:26.652685 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:34:26 crc kubenswrapper[4672]: I1007 15:34:26.652738 4672 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77" gracePeriod=600 Oct 07 15:34:27 crc kubenswrapper[4672]: I1007 15:34:27.180257 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77" exitCode=0 Oct 07 15:34:27 crc kubenswrapper[4672]: I1007 15:34:27.180468 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77"} Oct 07 15:34:27 crc kubenswrapper[4672]: I1007 15:34:27.180589 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff"} Oct 07 15:34:27 crc kubenswrapper[4672]: I1007 15:34:27.180615 4672 scope.go:117] "RemoveContainer" containerID="f0694e5aca4eee79e0d694ce9674fef3d79564d4db801118d1d890ad750323d1" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.601497 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:34:52 crc kubenswrapper[4672]: E1007 15:34:52.602318 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="extract-utilities" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.602330 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="extract-utilities" Oct 07 15:34:52 crc kubenswrapper[4672]: E1007 15:34:52.602353 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="registry-server" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.602359 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="registry-server" Oct 07 15:34:52 crc kubenswrapper[4672]: E1007 15:34:52.602391 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="extract-content" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.602397 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="extract-content" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.602567 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1bc29c-f45a-4070-8fbb-e58c56ce448d" containerName="registry-server" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.603840 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.623774 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.747626 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chv8k\" (UniqueName: \"kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.748102 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.748248 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.849690 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chv8k\" (UniqueName: \"kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.849825 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.849901 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.850563 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.850647 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.877633 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-chv8k\" (UniqueName: \"kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k\") pod \"certified-operators-n42fb\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:52 crc kubenswrapper[4672]: I1007 15:34:52.932498 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:34:53 crc kubenswrapper[4672]: I1007 15:34:53.250761 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:34:53 crc kubenswrapper[4672]: I1007 15:34:53.432601 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerStarted","Data":"9e27fd73f5555a222ff381f217b9fd84b221d966d40a1bf7cd51e181c67e6d02"} Oct 07 15:34:54 crc kubenswrapper[4672]: I1007 15:34:54.443530 4672 generic.go:334] "Generic (PLEG): container finished" podID="03683614-2760-4f58-9ca6-deab367a54e4" containerID="6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7" exitCode=0 Oct 07 15:34:54 crc kubenswrapper[4672]: I1007 15:34:54.443655 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerDied","Data":"6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7"} Oct 07 15:34:55 crc kubenswrapper[4672]: I1007 15:34:55.454661 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerStarted","Data":"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47"} Oct 07 15:34:56 crc kubenswrapper[4672]: I1007 15:34:56.482087 4672 generic.go:334] "Generic (PLEG): container finished" podID="03683614-2760-4f58-9ca6-deab367a54e4" containerID="4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47" exitCode=0 Oct 07 15:34:56 crc kubenswrapper[4672]: I1007 15:34:56.482188 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerDied","Data":"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47"} Oct 07 15:34:56 crc kubenswrapper[4672]: I1007 15:34:56.482361 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerStarted","Data":"25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6"} Oct 07 15:34:56 crc kubenswrapper[4672]: I1007 15:34:56.502311 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n42fb" podStartSLOduration=2.821076916 podStartE2EDuration="4.502291745s" podCreationTimestamp="2025-10-07 15:34:52 +0000 UTC" firstStartedPulling="2025-10-07 15:34:54.446872706 +0000 UTC m=+2771.422051287" lastFinishedPulling="2025-10-07 15:34:56.128087535 +0000 UTC m=+2773.103266116" observedRunningTime="2025-10-07 15:34:56.501906914 +0000 UTC m=+2773.477085535" watchObservedRunningTime="2025-10-07 15:34:56.502291745 +0000 UTC m=+2773.477470326" Oct 07 15:35:02 crc kubenswrapper[4672]: I1007 15:35:02.932619 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:02 crc kubenswrapper[4672]: I1007 15:35:02.933193 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:02 crc kubenswrapper[4672]: I1007 15:35:02.993519 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:03 crc kubenswrapper[4672]: I1007 15:35:03.586859 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:03 crc kubenswrapper[4672]: I1007 15:35:03.634833 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:35:05 crc kubenswrapper[4672]: I1007 15:35:05.562472 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-n42fb" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="registry-server" containerID="cri-o://25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6" gracePeriod=2 Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.575276 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.577752 4672 generic.go:334] "Generic (PLEG): container finished" podID="03683614-2760-4f58-9ca6-deab367a54e4" containerID="25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6" exitCode=0 Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.577795 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerDied","Data":"25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6"} Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.577825 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n42fb" event={"ID":"03683614-2760-4f58-9ca6-deab367a54e4","Type":"ContainerDied","Data":"9e27fd73f5555a222ff381f217b9fd84b221d966d40a1bf7cd51e181c67e6d02"} Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.577844 4672 scope.go:117] "RemoveContainer" containerID="25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.622304 4672 scope.go:117] "RemoveContainer" containerID="4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.661745 4672 scope.go:117] "RemoveContainer" containerID="6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.707471 4672 scope.go:117] "RemoveContainer" containerID="25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6" Oct 07 15:35:06 crc kubenswrapper[4672]: E1007 15:35:06.708003 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6\": container with ID starting with 25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6 not found: ID does not exist" containerID="25e6717a09e362d31823d991e5870a0336f6b3ea1bd31dd95e7a7d7be557e9d6" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.708055 4672 
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.708082 4672 scope.go:117] "RemoveContainer" containerID="4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47"
Oct 07 15:35:06 crc kubenswrapper[4672]: E1007 15:35:06.708540 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47\": container with ID starting with 4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47 not found: ID does not exist" containerID="4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47"
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.708613 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47"} err="failed to get container status \"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47\": rpc error: code = NotFound desc = could not find container \"4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47\": container with ID starting with 4bba10f4cd51bd1eeb2c7e7db45d2dfe5f601e968ff36e6b2a8f8bdd2472cd47 not found: ID does not exist"
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.708655 4672 scope.go:117] "RemoveContainer" containerID="6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7"
Oct 07 15:35:06 crc kubenswrapper[4672]: E1007 15:35:06.709195 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7\": container with ID starting with 6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7 not found: ID does not exist" containerID="6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7"
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.709239 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7"} err="failed to get container status \"6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7\": rpc error: code = NotFound desc = could not find container \"6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7\": container with ID starting with 6399b7d2f3f9ad21b4449bc88eddc3d167a2d188d6b12ce92b63fa6156f5d8d7 not found: ID does not exist"
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.742474 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chv8k\" (UniqueName: \"kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k\") pod \"03683614-2760-4f58-9ca6-deab367a54e4\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") "
Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.742636 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") pod \"03683614-2760-4f58-9ca6-deab367a54e4\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") "
\"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") pod \"03683614-2760-4f58-9ca6-deab367a54e4\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.742921 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content\") pod \"03683614-2760-4f58-9ca6-deab367a54e4\" (UID: \"03683614-2760-4f58-9ca6-deab367a54e4\") " Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.745132 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities" (OuterVolumeSpecName: "utilities") pod "03683614-2760-4f58-9ca6-deab367a54e4" (UID: "03683614-2760-4f58-9ca6-deab367a54e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.752329 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k" (OuterVolumeSpecName: "kube-api-access-chv8k") pod "03683614-2760-4f58-9ca6-deab367a54e4" (UID: "03683614-2760-4f58-9ca6-deab367a54e4"). InnerVolumeSpecName "kube-api-access-chv8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.790297 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03683614-2760-4f58-9ca6-deab367a54e4" (UID: "03683614-2760-4f58-9ca6-deab367a54e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.846382 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.846436 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03683614-2760-4f58-9ca6-deab367a54e4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:06 crc kubenswrapper[4672]: I1007 15:35:06.846458 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chv8k\" (UniqueName: \"kubernetes.io/projected/03683614-2760-4f58-9ca6-deab367a54e4-kube-api-access-chv8k\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:07 crc kubenswrapper[4672]: I1007 15:35:07.590442 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n42fb" Oct 07 15:35:07 crc kubenswrapper[4672]: I1007 15:35:07.625484 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:35:07 crc kubenswrapper[4672]: I1007 15:35:07.633574 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-n42fb"] Oct 07 15:35:07 crc kubenswrapper[4672]: I1007 15:35:07.903438 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03683614-2760-4f58-9ca6-deab367a54e4" path="/var/lib/kubelet/pods/03683614-2760-4f58-9ca6-deab367a54e4/volumes" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.071075 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:14 crc kubenswrapper[4672]: E1007 15:35:14.072338 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="extract-content" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.072354 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="extract-content" Oct 07 15:35:14 crc kubenswrapper[4672]: E1007 15:35:14.072379 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="extract-utilities" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.072385 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="extract-utilities" Oct 07 15:35:14 crc kubenswrapper[4672]: E1007 15:35:14.072397 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="registry-server" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.072403 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="registry-server" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.072611 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="03683614-2760-4f58-9ca6-deab367a54e4" containerName="registry-server" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.074135 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.094934 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.095974 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl98r\" (UniqueName: \"kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.096069 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.096154 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.198494 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl98r\" (UniqueName: \"kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.198896 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.199096 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.199493 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.199700 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.218360 4672 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dl98r\" (UniqueName: \"kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r\") pod \"redhat-marketplace-6s6v9\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.396190 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:14 crc kubenswrapper[4672]: I1007 15:35:14.823111 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:14 crc kubenswrapper[4672]: W1007 15:35:14.823865 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b475f81_8be9_415f_9ee9_6f1b8de59040.slice/crio-28a4a8ed557685a5df637c45ce5d4b9779e0aeec790cdd781fa510f33b35a300 WatchSource:0}: Error finding container 28a4a8ed557685a5df637c45ce5d4b9779e0aeec790cdd781fa510f33b35a300: Status 404 returned error can't find the container with id 28a4a8ed557685a5df637c45ce5d4b9779e0aeec790cdd781fa510f33b35a300 Oct 07 15:35:15 crc kubenswrapper[4672]: I1007 15:35:15.673035 4672 generic.go:334] "Generic (PLEG): container finished" podID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerID="095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74" exitCode=0 Oct 07 15:35:15 crc kubenswrapper[4672]: I1007 15:35:15.673093 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerDied","Data":"095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74"} Oct 07 15:35:15 crc kubenswrapper[4672]: I1007 15:35:15.674163 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerStarted","Data":"28a4a8ed557685a5df637c45ce5d4b9779e0aeec790cdd781fa510f33b35a300"} Oct 07 15:35:15 crc kubenswrapper[4672]: I1007 15:35:15.675448 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:35:17 crc kubenswrapper[4672]: I1007 15:35:17.690889 4672 generic.go:334] "Generic (PLEG): container finished" podID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerID="dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393" exitCode=0 Oct 07 15:35:17 crc kubenswrapper[4672]: I1007 15:35:17.691076 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerDied","Data":"dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393"} Oct 07 15:35:18 crc kubenswrapper[4672]: I1007 15:35:18.702709 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerStarted","Data":"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d"} Oct 07 15:35:18 crc kubenswrapper[4672]: I1007 15:35:18.732632 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6s6v9" podStartSLOduration=2.045042955 podStartE2EDuration="4.732608192s" podCreationTimestamp="2025-10-07 15:35:14 +0000 UTC" firstStartedPulling="2025-10-07 15:35:15.675084348 +0000 UTC m=+2792.650262929" 
lastFinishedPulling="2025-10-07 15:35:18.362649585 +0000 UTC m=+2795.337828166" observedRunningTime="2025-10-07 15:35:18.72121607 +0000 UTC m=+2795.696394661" watchObservedRunningTime="2025-10-07 15:35:18.732608192 +0000 UTC m=+2795.707786773" Oct 07 15:35:24 crc kubenswrapper[4672]: I1007 15:35:24.396452 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:24 crc kubenswrapper[4672]: I1007 15:35:24.396871 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:24 crc kubenswrapper[4672]: I1007 15:35:24.448643 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:24 crc kubenswrapper[4672]: I1007 15:35:24.811371 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:24 crc kubenswrapper[4672]: I1007 15:35:24.990394 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:26 crc kubenswrapper[4672]: I1007 15:35:26.780667 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6s6v9" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="registry-server" containerID="cri-o://81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d" gracePeriod=2 Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.229802 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.339479 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities\") pod \"8b475f81-8be9-415f-9ee9-6f1b8de59040\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.339582 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl98r\" (UniqueName: \"kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r\") pod \"8b475f81-8be9-415f-9ee9-6f1b8de59040\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.339947 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content\") pod \"8b475f81-8be9-415f-9ee9-6f1b8de59040\" (UID: \"8b475f81-8be9-415f-9ee9-6f1b8de59040\") " Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.340881 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities" (OuterVolumeSpecName: "utilities") pod "8b475f81-8be9-415f-9ee9-6f1b8de59040" (UID: "8b475f81-8be9-415f-9ee9-6f1b8de59040"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.341342 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.347215 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r" (OuterVolumeSpecName: "kube-api-access-dl98r") pod "8b475f81-8be9-415f-9ee9-6f1b8de59040" (UID: "8b475f81-8be9-415f-9ee9-6f1b8de59040"). InnerVolumeSpecName "kube-api-access-dl98r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.353339 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b475f81-8be9-415f-9ee9-6f1b8de59040" (UID: "8b475f81-8be9-415f-9ee9-6f1b8de59040"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.444802 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b475f81-8be9-415f-9ee9-6f1b8de59040-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.444849 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl98r\" (UniqueName: \"kubernetes.io/projected/8b475f81-8be9-415f-9ee9-6f1b8de59040-kube-api-access-dl98r\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.791549 4672 generic.go:334] "Generic (PLEG): container finished" podID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerID="81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d" exitCode=0 Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.791598 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerDied","Data":"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d"} Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.791626 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s6v9" event={"ID":"8b475f81-8be9-415f-9ee9-6f1b8de59040","Type":"ContainerDied","Data":"28a4a8ed557685a5df637c45ce5d4b9779e0aeec790cdd781fa510f33b35a300"} Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.791630 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s6v9" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.791645 4672 scope.go:117] "RemoveContainer" containerID="81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.813858 4672 scope.go:117] "RemoveContainer" containerID="dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.831007 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.843312 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s6v9"] Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.853227 4672 scope.go:117] "RemoveContainer" containerID="095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.876174 4672 scope.go:117] "RemoveContainer" containerID="81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d" Oct 07 15:35:27 crc kubenswrapper[4672]: E1007 15:35:27.876772 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d\": container with ID starting with 81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d not found: ID does not exist" containerID="81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.876828 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d"} err="failed to get container status \"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d\": rpc error: code = NotFound desc = could not find container \"81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d\": container with ID starting with 81bfaf9fa7134250d455076bcedfa8c27427d2f938be28f30fbc5b55091f851d not found: ID does not exist" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.876861 4672 scope.go:117] "RemoveContainer" containerID="dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393" Oct 07 15:35:27 crc kubenswrapper[4672]: E1007 15:35:27.877343 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393\": container with ID starting with dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393 not found: ID does not exist" containerID="dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.877378 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393"} err="failed to get container status \"dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393\": rpc error: code = NotFound desc = could not find container \"dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393\": container with ID starting with dcf2c4447789730aad196ecfef047bfa387cfe4b792894f2d6593ae3256c8393 not found: ID does not exist" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.877410 4672 scope.go:117] "RemoveContainer" 
containerID="095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74" Oct 07 15:35:27 crc kubenswrapper[4672]: E1007 15:35:27.877721 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74\": container with ID starting with 095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74 not found: ID does not exist" containerID="095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.877748 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74"} err="failed to get container status \"095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74\": rpc error: code = NotFound desc = could not find container \"095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74\": container with ID starting with 095c402b84b83dfea9ee18767a07af93f2bf599c6c170d40458d64f10ac95c74 not found: ID does not exist" Oct 07 15:35:27 crc kubenswrapper[4672]: I1007 15:35:27.904221 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" path="/var/lib/kubelet/pods/8b475f81-8be9-415f-9ee9-6f1b8de59040/volumes" Oct 07 15:35:35 crc kubenswrapper[4672]: I1007 15:35:35.859206 4672 generic.go:334] "Generic (PLEG): container finished" podID="a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" containerID="f2d5c00f738e17391894152014e53739b41f8b294d77622d943b4f64bbcbc40b" exitCode=0 Oct 07 15:35:35 crc kubenswrapper[4672]: I1007 15:35:35.859280 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" event={"ID":"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc","Type":"ContainerDied","Data":"f2d5c00f738e17391894152014e53739b41f8b294d77622d943b4f64bbcbc40b"} Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.249187 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327321 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327401 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327533 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327586 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327611 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327631 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.327755 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7dzp\" (UniqueName: \"kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp\") pod \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\" (UID: \"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc\") " Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.333350 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp" (OuterVolumeSpecName: "kube-api-access-z7dzp") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "kube-api-access-z7dzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.333911 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.356712 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.360581 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.361180 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.362495 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory" (OuterVolumeSpecName: "inventory") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.363158 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" (UID: "a7d08d2c-7a70-47f3-afa0-f93d34efd7dc"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430446 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7dzp\" (UniqueName: \"kubernetes.io/projected/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-kube-api-access-z7dzp\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430485 4672 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430498 4672 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430512 4672 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430524 4672 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430534 4672 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.430544 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7d08d2c-7a70-47f3-afa0-f93d34efd7dc-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.877659 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" event={"ID":"a7d08d2c-7a70-47f3-afa0-f93d34efd7dc","Type":"ContainerDied","Data":"619476f4c3c350a4ba1bc1afa3ab0c6296eea0744ca72d36b9e58d4b41ce8072"} Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.877698 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="619476f4c3c350a4ba1bc1afa3ab0c6296eea0744ca72d36b9e58d4b41ce8072" Oct 07 15:35:37 crc kubenswrapper[4672]: I1007 15:35:37.877755 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jktl2" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.448392 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 15:36:21 crc kubenswrapper[4672]: E1007 15:36:21.449374 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="extract-utilities" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449393 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="extract-utilities" Oct 07 15:36:21 crc kubenswrapper[4672]: E1007 15:36:21.449420 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449429 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 15:36:21 crc kubenswrapper[4672]: E1007 15:36:21.449435 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="registry-server" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449471 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="registry-server" Oct 07 15:36:21 crc kubenswrapper[4672]: E1007 15:36:21.449503 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="extract-content" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449509 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="extract-content" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449701 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d08d2c-7a70-47f3-afa0-f93d34efd7dc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.449711 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b475f81-8be9-415f-9ee9-6f1b8de59040" containerName="registry-server" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.450454 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.452234 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.452292 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.452593 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.452808 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-v6mp7" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.458192 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.551940 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552052 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552087 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552113 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552132 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552331 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552503 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552561 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v89n\" (UniqueName: \"kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.552609 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.654908 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.654983 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655047 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655082 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655186 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655226 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655249 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v89n\" (UniqueName: \"kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n\") pod 
\"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655283 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655326 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655351 4672 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655552 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.655860 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.656565 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.657330 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.662601 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.663053 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest" Oct 07 15:36:21 crc 
Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.665624 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest"
Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.680721 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v89n\" (UniqueName: \"kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest"
Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.686778 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " pod="openstack/tempest-tests-tempest"
Oct 07 15:36:21 crc kubenswrapper[4672]: I1007 15:36:21.780074 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 07 15:36:22 crc kubenswrapper[4672]: I1007 15:36:22.230550 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 07 15:36:22 crc kubenswrapper[4672]: I1007 15:36:22.258798 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5104b964-0b2d-4b1f-920b-a0b3016ed885","Type":"ContainerStarted","Data":"d5bc22cbf94a0c399815c09c41f42133268d75bc9a33c4849b1374af735b17f5"}
Oct 07 15:36:26 crc kubenswrapper[4672]: I1007 15:36:26.650601 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 15:36:26 crc kubenswrapper[4672]: I1007 15:36:26.651710 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
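
The liveness failures above are plain HTTP probes: a GET against the container's 127.0.0.1:8798/health endpoint, where a refused connection counts as failure. A minimal checker that does the same thing follows; the one-second timeout and success range are assumptions, not the pod's configured probe settings:

```go
// Sketch of an HTTP liveness check like the one failing above: a GET
// with a short timeout, where transport errors (e.g. "connection
// refused") and bad statuses both count as probe failures.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused", as in the log
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("healthy")
	}
}
```

A single failure is not fatal; only after the probe's failure threshold is crossed does the kubelet restart the container, which is what eventually happens to machine-config-daemon at 15:37:26 below.
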
Oct 07 15:36:35 crc kubenswrapper[4672]: E1007 15:36:35.968795 4672 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-tempest-all:b78cfc68a577b1553523c8a70a34e297"
Oct 07 15:36:35 crc kubenswrapper[4672]: E1007 15:36:35.969341 4672 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.103:5001/podified-antelope-centos9/openstack-tempest-all:b78cfc68a577b1553523c8a70a34e297"
Oct 07 15:36:35 crc kubenswrapper[4672]: E1007 15:36:35.969473 4672 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:38.102.83.103:5001/podified-antelope-centos9/openstack-tempest-all:b78cfc68a577b1553523c8a70a34e297,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7v89n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(5104b964-0b2d-4b1f-920b-a0b3016ed885): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 07 15:36:35 crc kubenswrapper[4672]: E1007 15:36:35.970642 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885"
podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885" Oct 07 15:36:36 crc kubenswrapper[4672]: E1007 15:36:36.392156 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.103:5001/podified-antelope-centos9/openstack-tempest-all:b78cfc68a577b1553523c8a70a34e297\\\"\"" pod="openstack/tempest-tests-tempest" podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885" Oct 07 15:36:50 crc kubenswrapper[4672]: I1007 15:36:50.957040 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 07 15:36:52 crc kubenswrapper[4672]: I1007 15:36:52.540696 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5104b964-0b2d-4b1f-920b-a0b3016ed885","Type":"ContainerStarted","Data":"86c11bf094befdad0a24f39255687c210bcf6e09a383198b9d27c13e46b3f937"} Oct 07 15:36:52 crc kubenswrapper[4672]: I1007 15:36:52.564531 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.854000811 podStartE2EDuration="32.564511433s" podCreationTimestamp="2025-10-07 15:36:20 +0000 UTC" firstStartedPulling="2025-10-07 15:36:22.24244621 +0000 UTC m=+2859.217624791" lastFinishedPulling="2025-10-07 15:36:50.952956822 +0000 UTC m=+2887.928135413" observedRunningTime="2025-10-07 15:36:52.560065314 +0000 UTC m=+2889.535243905" watchObservedRunningTime="2025-10-07 15:36:52.564511433 +0000 UTC m=+2889.539690014" Oct 07 15:36:56 crc kubenswrapper[4672]: I1007 15:36:56.650587 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:36:56 crc kubenswrapper[4672]: I1007 15:36:56.651178 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.650257 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.650928 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.650985 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.651753 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.651816 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" gracePeriod=600 Oct 07 15:37:26 crc kubenswrapper[4672]: E1007 15:37:26.802774 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.869673 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" exitCode=0 Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.869759 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff"} Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.870120 4672 scope.go:117] "RemoveContainer" containerID="39e9cd012da9a768eb4bcb625111ffc034de7d92657d16897da0146e709edd77" Oct 07 15:37:26 crc kubenswrapper[4672]: I1007 15:37:26.871518 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:37:26 crc kubenswrapper[4672]: E1007 15:37:26.871951 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.682821 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v2g5m"] Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.685572 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.712361 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2g5m"] Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.813500 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-utilities\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.813584 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl7t5\" (UniqueName: \"kubernetes.io/projected/b530062a-623d-417b-9cea-906c854ee3ed-kube-api-access-tl7t5\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.813605 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-catalog-content\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.916719 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-utilities\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.916820 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl7t5\" (UniqueName: \"kubernetes.io/projected/b530062a-623d-417b-9cea-906c854ee3ed-kube-api-access-tl7t5\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.916843 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-catalog-content\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.917695 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-catalog-content\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.917753 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b530062a-623d-417b-9cea-906c854ee3ed-utilities\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:28 crc kubenswrapper[4672]: I1007 15:37:28.936067 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tl7t5\" (UniqueName: \"kubernetes.io/projected/b530062a-623d-417b-9cea-906c854ee3ed-kube-api-access-tl7t5\") pod \"community-operators-v2g5m\" (UID: \"b530062a-623d-417b-9cea-906c854ee3ed\") " pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:29 crc kubenswrapper[4672]: I1007 15:37:29.019701 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:29 crc kubenswrapper[4672]: I1007 15:37:29.597115 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2g5m"] Oct 07 15:37:29 crc kubenswrapper[4672]: I1007 15:37:29.910358 4672 generic.go:334] "Generic (PLEG): container finished" podID="b530062a-623d-417b-9cea-906c854ee3ed" containerID="2e762fb6a14ce99273c2e35616dfbf90a7e70ce5af8dde1bb73113c2f3f68d35" exitCode=0 Oct 07 15:37:29 crc kubenswrapper[4672]: I1007 15:37:29.910435 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2g5m" event={"ID":"b530062a-623d-417b-9cea-906c854ee3ed","Type":"ContainerDied","Data":"2e762fb6a14ce99273c2e35616dfbf90a7e70ce5af8dde1bb73113c2f3f68d35"} Oct 07 15:37:29 crc kubenswrapper[4672]: I1007 15:37:29.910680 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2g5m" event={"ID":"b530062a-623d-417b-9cea-906c854ee3ed","Type":"ContainerStarted","Data":"68f5e6324dfff52719e6ea22d5959a8ec98563b7315714d2cc7988e875bf4948"} Oct 07 15:37:34 crc kubenswrapper[4672]: I1007 15:37:34.957598 4672 generic.go:334] "Generic (PLEG): container finished" podID="b530062a-623d-417b-9cea-906c854ee3ed" containerID="e0b8fb3841710439458fd8c10ddb8c1b1a3b22338ee0f9c44528c4bab4ec28cc" exitCode=0 Oct 07 15:37:34 crc kubenswrapper[4672]: I1007 15:37:34.957667 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2g5m" event={"ID":"b530062a-623d-417b-9cea-906c854ee3ed","Type":"ContainerDied","Data":"e0b8fb3841710439458fd8c10ddb8c1b1a3b22338ee0f9c44528c4bab4ec28cc"} Oct 07 15:37:37 crc kubenswrapper[4672]: I1007 15:37:37.995790 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2g5m" event={"ID":"b530062a-623d-417b-9cea-906c854ee3ed","Type":"ContainerStarted","Data":"e095eb866c0d138c352567652d869075e1e8e088293ae523e3c583a67f12a6fd"} Oct 07 15:37:38 crc kubenswrapper[4672]: I1007 15:37:38.020975 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v2g5m" podStartSLOduration=3.244530209 podStartE2EDuration="10.020899792s" podCreationTimestamp="2025-10-07 15:37:28 +0000 UTC" firstStartedPulling="2025-10-07 15:37:29.913224063 +0000 UTC m=+2926.888402644" lastFinishedPulling="2025-10-07 15:37:36.689593646 +0000 UTC m=+2933.664772227" observedRunningTime="2025-10-07 15:37:38.015441814 +0000 UTC m=+2934.990620415" watchObservedRunningTime="2025-10-07 15:37:38.020899792 +0000 UTC m=+2934.996078393" Oct 07 15:37:38 crc kubenswrapper[4672]: I1007 15:37:38.892083 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:37:38 crc kubenswrapper[4672]: E1007 15:37:38.892615 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:37:39 crc kubenswrapper[4672]: I1007 15:37:39.020937 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:39 crc kubenswrapper[4672]: I1007 15:37:39.021806 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:40 crc kubenswrapper[4672]: I1007 15:37:40.076787 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-v2g5m" podUID="b530062a-623d-417b-9cea-906c854ee3ed" containerName="registry-server" probeResult="failure" output=< Oct 07 15:37:40 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:37:40 crc kubenswrapper[4672]: > Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.076757 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.132400 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v2g5m" Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.232151 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2g5m"] Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.315099 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.315583 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9pg96" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="registry-server" containerID="cri-o://2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75" gracePeriod=2 Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.860462 4672 util.go:48] "No ready sandbox for pod can be found. 
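The machine-config-daemon entries above are the liveness-probe contract end to end: an HTTP GET against 127.0.0.1:8798/health where a transport error such as "connection refused" counts as a failure, after which the kubelet kills the container with the pod's grace period (600s here) and restarts it under back-off. A stdlib-only Go sketch of one such checker; the 30s period and failure threshold of 3 are assumed defaults, not values read from this log:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// liveness performs one HTTP-GET check the way the probe above is logged:
// any transport error (e.g. "connect: connection refused") or a non-2xx
// status counts as a failure.
func liveness(url string, timeout time.Duration) error {
	c := &http.Client{Timeout: timeout}
	resp, err := c.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	failures := 0
	for range time.Tick(30 * time.Second) { // assumed periodSeconds
		if err := liveness("http://127.0.0.1:8798/health", time.Second); err != nil {
			failures++
			fmt.Println("Probe failed:", err)
			if failures >= 3 { // assumed failureThreshold
				fmt.Println("failed liveness probe, will be restarted")
				return // the kubelet would kill the container (gracePeriod=600) here
			}
			continue
		}
		failures = 0 // one success resets the consecutive-failure count
	}
}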
Need to start a new one" pod="openshift-marketplace/community-operators-9pg96" Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.965257 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content\") pod \"ae974611-9a8a-42b1-8406-5f532debaab1\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.965438 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw9pr\" (UniqueName: \"kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr\") pod \"ae974611-9a8a-42b1-8406-5f532debaab1\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.965522 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities\") pod \"ae974611-9a8a-42b1-8406-5f532debaab1\" (UID: \"ae974611-9a8a-42b1-8406-5f532debaab1\") " Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.968913 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities" (OuterVolumeSpecName: "utilities") pod "ae974611-9a8a-42b1-8406-5f532debaab1" (UID: "ae974611-9a8a-42b1-8406-5f532debaab1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.969269 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:37:49 crc kubenswrapper[4672]: I1007 15:37:49.977767 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr" (OuterVolumeSpecName: "kube-api-access-hw9pr") pod "ae974611-9a8a-42b1-8406-5f532debaab1" (UID: "ae974611-9a8a-42b1-8406-5f532debaab1"). InnerVolumeSpecName "kube-api-access-hw9pr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.030376 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae974611-9a8a-42b1-8406-5f532debaab1" (UID: "ae974611-9a8a-42b1-8406-5f532debaab1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.071007 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae974611-9a8a-42b1-8406-5f532debaab1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.071062 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw9pr\" (UniqueName: \"kubernetes.io/projected/ae974611-9a8a-42b1-8406-5f532debaab1-kube-api-access-hw9pr\") on node \"crc\" DevicePath \"\"" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.128316 4672 generic.go:334] "Generic (PLEG): container finished" podID="ae974611-9a8a-42b1-8406-5f532debaab1" containerID="2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75" exitCode=0 Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.128405 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerDied","Data":"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75"} Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.128445 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pg96" event={"ID":"ae974611-9a8a-42b1-8406-5f532debaab1","Type":"ContainerDied","Data":"dcdb913afada179889244037da43249b16fa7eff56f17c156bf028c2839a3a6e"} Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.128465 4672 scope.go:117] "RemoveContainer" containerID="2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.128412 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9pg96" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.164921 4672 scope.go:117] "RemoveContainer" containerID="b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.169565 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.180239 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9pg96"] Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.189885 4672 scope.go:117] "RemoveContainer" containerID="fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.236731 4672 scope.go:117] "RemoveContainer" containerID="2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75" Oct 07 15:37:50 crc kubenswrapper[4672]: E1007 15:37:50.237202 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75\": container with ID starting with 2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75 not found: ID does not exist" containerID="2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.237237 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75"} err="failed to get container status \"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75\": rpc error: code = NotFound desc = could not find container \"2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75\": container with ID starting with 2437c32363539e7f1652b3446745519b4d5e6395318f51a621718dfd55666f75 not found: ID does not exist" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.237263 4672 scope.go:117] "RemoveContainer" containerID="b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba" Oct 07 15:37:50 crc kubenswrapper[4672]: E1007 15:37:50.237619 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba\": container with ID starting with b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba not found: ID does not exist" containerID="b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.237739 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba"} err="failed to get container status \"b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba\": rpc error: code = NotFound desc = could not find container \"b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba\": container with ID starting with b8f517e4391b622cb711f382975a03334f2bfb128f048abd53561c28bd8926ba not found: ID does not exist" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.237899 4672 scope.go:117] "RemoveContainer" containerID="fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c" Oct 07 15:37:50 crc kubenswrapper[4672]: E1007 15:37:50.238501 4672 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c\": container with ID starting with fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c not found: ID does not exist" containerID="fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c" Oct 07 15:37:50 crc kubenswrapper[4672]: I1007 15:37:50.238527 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c"} err="failed to get container status \"fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c\": rpc error: code = NotFound desc = could not find container \"fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c\": container with ID starting with fc52b37c0d4042a06709353d6727673333aa01419f4082553dd1e275a500f99c not found: ID does not exist" Oct 07 15:37:51 crc kubenswrapper[4672]: I1007 15:37:51.904479 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" path="/var/lib/kubelet/pods/ae974611-9a8a-42b1-8406-5f532debaab1/volumes" Oct 07 15:37:53 crc kubenswrapper[4672]: I1007 15:37:53.901011 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:37:53 crc kubenswrapper[4672]: E1007 15:37:53.901348 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:38:05 crc kubenswrapper[4672]: I1007 15:38:05.892559 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:38:05 crc kubenswrapper[4672]: E1007 15:38:05.893901 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:38:19 crc kubenswrapper[4672]: I1007 15:38:19.892460 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:38:19 crc kubenswrapper[4672]: E1007 15:38:19.893262 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:38:33 crc kubenswrapper[4672]: I1007 15:38:33.898962 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:38:33 crc kubenswrapper[4672]: E1007 15:38:33.899937 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:38:47 crc kubenswrapper[4672]: I1007 15:38:47.892419 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:38:47 crc kubenswrapper[4672]: E1007 15:38:47.893271 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:39:02 crc kubenswrapper[4672]: I1007 15:39:02.892638 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:39:02 crc kubenswrapper[4672]: E1007 15:39:02.893496 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:39:15 crc kubenswrapper[4672]: I1007 15:39:15.892399 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:39:15 crc kubenswrapper[4672]: E1007 15:39:15.893084 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:39:26 crc kubenswrapper[4672]: I1007 15:39:26.891844 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:39:26 crc kubenswrapper[4672]: E1007 15:39:26.893262 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:39:38 crc kubenswrapper[4672]: I1007 15:39:38.892674 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:39:38 crc kubenswrapper[4672]: E1007 15:39:38.893900 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:39:53 crc kubenswrapper[4672]: I1007 15:39:53.898380 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:39:53 crc kubenswrapper[4672]: E1007 15:39:53.899154 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:40:06 crc kubenswrapper[4672]: I1007 15:40:06.891916 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:40:06 crc kubenswrapper[4672]: E1007 15:40:06.892624 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:40:21 crc kubenswrapper[4672]: I1007 15:40:21.892228 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:40:21 crc kubenswrapper[4672]: E1007 15:40:21.892908 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:40:35 crc kubenswrapper[4672]: I1007 15:40:35.892303 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:40:35 crc kubenswrapper[4672]: E1007 15:40:35.892998 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:40:47 crc kubenswrapper[4672]: I1007 15:40:47.892233 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:40:47 crc kubenswrapper[4672]: E1007 15:40:47.892940 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" 
podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:41:02 crc kubenswrapper[4672]: I1007 15:41:02.891561 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:41:02 crc kubenswrapper[4672]: E1007 15:41:02.892447 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:41:16 crc kubenswrapper[4672]: I1007 15:41:16.892705 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:41:16 crc kubenswrapper[4672]: E1007 15:41:16.893458 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:41:29 crc kubenswrapper[4672]: I1007 15:41:29.892996 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:41:29 crc kubenswrapper[4672]: E1007 15:41:29.894009 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:41:44 crc kubenswrapper[4672]: I1007 15:41:44.891658 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:41:44 crc kubenswrapper[4672]: E1007 15:41:44.892389 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:41:59 crc kubenswrapper[4672]: I1007 15:41:59.892219 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:41:59 crc kubenswrapper[4672]: E1007 15:41:59.893058 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:42:13 crc kubenswrapper[4672]: I1007 15:42:13.899161 4672 scope.go:117] "RemoveContainer" 
containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:42:13 crc kubenswrapper[4672]: E1007 15:42:13.900072 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:42:25 crc kubenswrapper[4672]: I1007 15:42:25.893150 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:42:25 crc kubenswrapper[4672]: E1007 15:42:25.893940 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:42:39 crc kubenswrapper[4672]: I1007 15:42:39.892152 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:42:40 crc kubenswrapper[4672]: I1007 15:42:40.808478 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196"} Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.512810 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pj9xc"] Oct 07 15:44:40 crc kubenswrapper[4672]: E1007 15:44:40.513757 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="extract-utilities" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.513771 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="extract-utilities" Oct 07 15:44:40 crc kubenswrapper[4672]: E1007 15:44:40.513806 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="extract-content" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.513814 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="extract-content" Oct 07 15:44:40 crc kubenswrapper[4672]: E1007 15:44:40.513822 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="registry-server" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.513828 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="registry-server" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.514032 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae974611-9a8a-42b1-8406-5f532debaab1" containerName="registry-server" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.515478 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.540519 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pj9xc"] Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.602214 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-utilities\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.602324 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmz5f\" (UniqueName: \"kubernetes.io/projected/458481ee-d389-40b6-a1bd-547d99652d6d-kube-api-access-hmz5f\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.602843 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-catalog-content\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.705506 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-utilities\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.705735 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmz5f\" (UniqueName: \"kubernetes.io/projected/458481ee-d389-40b6-a1bd-547d99652d6d-kube-api-access-hmz5f\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.705966 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-catalog-content\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.706105 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-utilities\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.706599 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458481ee-d389-40b6-a1bd-547d99652d6d-catalog-content\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.728225 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hmz5f\" (UniqueName: \"kubernetes.io/projected/458481ee-d389-40b6-a1bd-547d99652d6d-kube-api-access-hmz5f\") pod \"redhat-operators-pj9xc\" (UID: \"458481ee-d389-40b6-a1bd-547d99652d6d\") " pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:40 crc kubenswrapper[4672]: I1007 15:44:40.835721 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:44:41 crc kubenswrapper[4672]: I1007 15:44:41.364552 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pj9xc"] Oct 07 15:44:41 crc kubenswrapper[4672]: I1007 15:44:41.890359 4672 generic.go:334] "Generic (PLEG): container finished" podID="458481ee-d389-40b6-a1bd-547d99652d6d" containerID="d2206f082371583f20cf96e709e7c8619b7f0d66bc7f1b816aabe92fd4a10ee0" exitCode=0 Oct 07 15:44:41 crc kubenswrapper[4672]: I1007 15:44:41.890634 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pj9xc" event={"ID":"458481ee-d389-40b6-a1bd-547d99652d6d","Type":"ContainerDied","Data":"d2206f082371583f20cf96e709e7c8619b7f0d66bc7f1b816aabe92fd4a10ee0"} Oct 07 15:44:41 crc kubenswrapper[4672]: I1007 15:44:41.890888 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pj9xc" event={"ID":"458481ee-d389-40b6-a1bd-547d99652d6d","Type":"ContainerStarted","Data":"82a31239f100200c737d1fd39707245595e9bf2aadf736bae5c966b09184018a"} Oct 07 15:44:41 crc kubenswrapper[4672]: I1007 15:44:41.895221 4672 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 15:44:51 crc kubenswrapper[4672]: I1007 15:44:51.996632 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pj9xc" event={"ID":"458481ee-d389-40b6-a1bd-547d99652d6d","Type":"ContainerStarted","Data":"09ef2e8d6c88b62027a4fc5fd2a75fcbd100a46f2faa0ff41ed454c6d909ef3b"} Oct 07 15:44:55 crc kubenswrapper[4672]: I1007 15:44:55.024452 4672 generic.go:334] "Generic (PLEG): container finished" podID="458481ee-d389-40b6-a1bd-547d99652d6d" containerID="09ef2e8d6c88b62027a4fc5fd2a75fcbd100a46f2faa0ff41ed454c6d909ef3b" exitCode=0 Oct 07 15:44:55 crc kubenswrapper[4672]: I1007 15:44:55.024521 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pj9xc" event={"ID":"458481ee-d389-40b6-a1bd-547d99652d6d","Type":"ContainerDied","Data":"09ef2e8d6c88b62027a4fc5fd2a75fcbd100a46f2faa0ff41ed454c6d909ef3b"} Oct 07 15:44:56 crc kubenswrapper[4672]: I1007 15:44:56.037320 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pj9xc" event={"ID":"458481ee-d389-40b6-a1bd-547d99652d6d","Type":"ContainerStarted","Data":"6f577fca64ac22fdab1fb729251c01c6a822484c17eb5ad50f96e6b679dcb279"} Oct 07 15:44:56 crc kubenswrapper[4672]: I1007 15:44:56.062408 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pj9xc" podStartSLOduration=2.514382757 podStartE2EDuration="16.062387037s" podCreationTimestamp="2025-10-07 15:44:40 +0000 UTC" firstStartedPulling="2025-10-07 15:44:41.894897197 +0000 UTC m=+3358.870075778" lastFinishedPulling="2025-10-07 15:44:55.442901487 +0000 UTC m=+3372.418080058" observedRunningTime="2025-10-07 15:44:56.061403759 +0000 UTC m=+3373.036582340" watchObservedRunningTime="2025-10-07 15:44:56.062387037 +0000 UTC m=+3373.037565618" Oct 07 15:44:56 crc 
kubenswrapper[4672]: I1007 15:44:56.650508 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:44:56 crc kubenswrapper[4672]: I1007 15:44:56.650569 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.182959 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc"] Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.184905 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.189472 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.189777 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.193397 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc"] Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.247122 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftrw4\" (UniqueName: \"kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.247227 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.247299 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.348763 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftrw4\" (UniqueName: \"kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 
15:45:00.348865 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.348931 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.349802 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.356779 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.365348 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftrw4\" (UniqueName: \"kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4\") pod \"collect-profiles-29330865-gltjc\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.561000 4672 util.go:30] "No sandbox for pod can be found. 
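The "Observed pod startup duration" entries above carry two figures: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that same interval minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Plugging in the redhat-operators-pj9xc timestamps reproduces 16.062s and ~2.514s to within rounding. A Go check of that arithmetic, under that reading of the tracker's fields:

package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		// Go's parser accepts the optional fractional seconds even though
		// the layout omits them.
		t, err := time.Parse("2006-01-02 15:04:05 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps taken verbatim from the pj9xc entry above.
	created := parse("2025-10-07 15:44:40 +0000 UTC")
	firstPull := parse("2025-10-07 15:44:41.894897197 +0000 UTC")
	lastPull := parse("2025-10-07 15:44:55.442901487 +0000 UTC")
	running := parse("2025-10-07 15:44:56.062387037 +0000 UTC")

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // SLO duration excludes image-pull time
	fmt.Println("podStartE2EDuration:", e2e) // 16.062387037s
	fmt.Println("podStartSLOduration:", slo) // ~2.514s
}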
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.837599 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:45:00 crc kubenswrapper[4672]: I1007 15:45:00.838047 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:45:01 crc kubenswrapper[4672]: I1007 15:45:01.069255 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc"] Oct 07 15:45:01 crc kubenswrapper[4672]: W1007 15:45:01.080976 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod799a9b3a_f48e_44bc_b6bf_9f7723217822.slice/crio-b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7 WatchSource:0}: Error finding container b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7: Status 404 returned error can't find the container with id b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7 Oct 07 15:45:01 crc kubenswrapper[4672]: I1007 15:45:01.883432 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pj9xc" podUID="458481ee-d389-40b6-a1bd-547d99652d6d" containerName="registry-server" probeResult="failure" output=< Oct 07 15:45:01 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:45:01 crc kubenswrapper[4672]: > Oct 07 15:45:02 crc kubenswrapper[4672]: I1007 15:45:02.103571 4672 generic.go:334] "Generic (PLEG): container finished" podID="799a9b3a-f48e-44bc-b6bf-9f7723217822" containerID="99da09adb8b75ec5b4d951e89a2541fb5b4f34bbff04e26b0111d9e47f701595" exitCode=0 Oct 07 15:45:02 crc kubenswrapper[4672]: I1007 15:45:02.103630 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" event={"ID":"799a9b3a-f48e-44bc-b6bf-9f7723217822","Type":"ContainerDied","Data":"99da09adb8b75ec5b4d951e89a2541fb5b4f34bbff04e26b0111d9e47f701595"} Oct 07 15:45:02 crc kubenswrapper[4672]: I1007 15:45:02.103669 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" event={"ID":"799a9b3a-f48e-44bc-b6bf-9f7723217822","Type":"ContainerStarted","Data":"b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7"} Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.525516 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.627238 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftrw4\" (UniqueName: \"kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4\") pod \"799a9b3a-f48e-44bc-b6bf-9f7723217822\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.627613 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume\") pod \"799a9b3a-f48e-44bc-b6bf-9f7723217822\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.627683 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume\") pod \"799a9b3a-f48e-44bc-b6bf-9f7723217822\" (UID: \"799a9b3a-f48e-44bc-b6bf-9f7723217822\") " Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.628257 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume" (OuterVolumeSpecName: "config-volume") pod "799a9b3a-f48e-44bc-b6bf-9f7723217822" (UID: "799a9b3a-f48e-44bc-b6bf-9f7723217822"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.628982 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/799a9b3a-f48e-44bc-b6bf-9f7723217822-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.634559 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4" (OuterVolumeSpecName: "kube-api-access-ftrw4") pod "799a9b3a-f48e-44bc-b6bf-9f7723217822" (UID: "799a9b3a-f48e-44bc-b6bf-9f7723217822"). InnerVolumeSpecName "kube-api-access-ftrw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.639251 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "799a9b3a-f48e-44bc-b6bf-9f7723217822" (UID: "799a9b3a-f48e-44bc-b6bf-9f7723217822"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.731058 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftrw4\" (UniqueName: \"kubernetes.io/projected/799a9b3a-f48e-44bc-b6bf-9f7723217822-kube-api-access-ftrw4\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:03 crc kubenswrapper[4672]: I1007 15:45:03.731110 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/799a9b3a-f48e-44bc-b6bf-9f7723217822-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:04 crc kubenswrapper[4672]: I1007 15:45:04.123449 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" event={"ID":"799a9b3a-f48e-44bc-b6bf-9f7723217822","Type":"ContainerDied","Data":"b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7"} Oct 07 15:45:04 crc kubenswrapper[4672]: I1007 15:45:04.123532 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b86faaed06bc5a00308e311473d616aa251259a6dade45e47450368b1ccf29c7" Oct 07 15:45:04 crc kubenswrapper[4672]: I1007 15:45:04.123525 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330865-gltjc" Oct 07 15:45:04 crc kubenswrapper[4672]: I1007 15:45:04.620925 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"] Oct 07 15:45:04 crc kubenswrapper[4672]: I1007 15:45:04.634071 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330820-bwmjx"] Oct 07 15:45:05 crc kubenswrapper[4672]: I1007 15:45:05.916367 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dac1f5d-9a0e-469d-9072-c200b51d991a" path="/var/lib/kubelet/pods/9dac1f5d-9a0e-469d-9072-c200b51d991a/volumes" Oct 07 15:45:10 crc kubenswrapper[4672]: I1007 15:45:10.890213 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:45:10 crc kubenswrapper[4672]: I1007 15:45:10.943574 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pj9xc" Oct 07 15:45:11 crc kubenswrapper[4672]: I1007 15:45:11.536249 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pj9xc"] Oct 07 15:45:11 crc kubenswrapper[4672]: I1007 15:45:11.722352 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:45:11 crc kubenswrapper[4672]: I1007 15:45:11.722664 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cnjn7" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="registry-server" containerID="cri-o://f8cc96227d14ef3db2b06f0b44a71e2fe3d737a74454c832b45b929f9ca97e15" gracePeriod=2 Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.190484 4672 generic.go:334] "Generic (PLEG): container finished" podID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerID="f8cc96227d14ef3db2b06f0b44a71e2fe3d737a74454c832b45b929f9ca97e15" exitCode=0 Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.190572 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" 
event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerDied","Data":"f8cc96227d14ef3db2b06f0b44a71e2fe3d737a74454c832b45b929f9ca97e15"} Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.190829 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cnjn7" event={"ID":"24a8e927-1908-46bd-8f03-25e2e63cc855","Type":"ContainerDied","Data":"bc736a06ef0ea2a9d0c42d776dc796182f31a3c496460833aca5643ba7f6c0ac"} Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.190879 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc736a06ef0ea2a9d0c42d776dc796182f31a3c496460833aca5643ba7f6c0ac" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.206263 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.322005 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdhbf\" (UniqueName: \"kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf\") pod \"24a8e927-1908-46bd-8f03-25e2e63cc855\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.322540 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities\") pod \"24a8e927-1908-46bd-8f03-25e2e63cc855\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.322618 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content\") pod \"24a8e927-1908-46bd-8f03-25e2e63cc855\" (UID: \"24a8e927-1908-46bd-8f03-25e2e63cc855\") " Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.329358 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities" (OuterVolumeSpecName: "utilities") pod "24a8e927-1908-46bd-8f03-25e2e63cc855" (UID: "24a8e927-1908-46bd-8f03-25e2e63cc855"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.333101 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf" (OuterVolumeSpecName: "kube-api-access-pdhbf") pod "24a8e927-1908-46bd-8f03-25e2e63cc855" (UID: "24a8e927-1908-46bd-8f03-25e2e63cc855"). InnerVolumeSpecName "kube-api-access-pdhbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.398330 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24a8e927-1908-46bd-8f03-25e2e63cc855" (UID: "24a8e927-1908-46bd-8f03-25e2e63cc855"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.425637 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.425961 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a8e927-1908-46bd-8f03-25e2e63cc855-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:12 crc kubenswrapper[4672]: I1007 15:45:12.426107 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdhbf\" (UniqueName: \"kubernetes.io/projected/24a8e927-1908-46bd-8f03-25e2e63cc855-kube-api-access-pdhbf\") on node \"crc\" DevicePath \"\"" Oct 07 15:45:13 crc kubenswrapper[4672]: I1007 15:45:13.199270 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cnjn7" Oct 07 15:45:13 crc kubenswrapper[4672]: I1007 15:45:13.259949 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:45:13 crc kubenswrapper[4672]: I1007 15:45:13.269626 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cnjn7"] Oct 07 15:45:13 crc kubenswrapper[4672]: I1007 15:45:13.901946 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" path="/var/lib/kubelet/pods/24a8e927-1908-46bd-8f03-25e2e63cc855/volumes" Oct 07 15:45:26 crc kubenswrapper[4672]: I1007 15:45:26.650271 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:45:26 crc kubenswrapper[4672]: I1007 15:45:26.650692 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:45:55 crc kubenswrapper[4672]: I1007 15:45:55.508129 4672 scope.go:117] "RemoveContainer" containerID="90bbad566c354bf22328a2f475a13d16cdfb54033dfc306b3f388276a703c378" Oct 07 15:45:55 crc kubenswrapper[4672]: I1007 15:45:55.555881 4672 scope.go:117] "RemoveContainer" containerID="4905334ff529958262599844b50b0f161be73f4908a31dd12f7143579d905471" Oct 07 15:45:55 crc kubenswrapper[4672]: I1007 15:45:55.581375 4672 scope.go:117] "RemoveContainer" containerID="fc24b2b4fd6de56b4e9f745416f1530ef078b7afc8527dfdd1929d4dc3e0770f" Oct 07 15:45:55 crc kubenswrapper[4672]: I1007 15:45:55.619015 4672 scope.go:117] "RemoveContainer" containerID="f8cc96227d14ef3db2b06f0b44a71e2fe3d737a74454c832b45b929f9ca97e15" Oct 07 15:45:56 crc kubenswrapper[4672]: I1007 15:45:56.650090 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:45:56 crc kubenswrapper[4672]: I1007 15:45:56.650160 4672 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:45:56 crc kubenswrapper[4672]: I1007 15:45:56.650225 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:45:56 crc kubenswrapper[4672]: I1007 15:45:56.651202 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:45:56 crc kubenswrapper[4672]: I1007 15:45:56.651299 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196" gracePeriod=600 Oct 07 15:45:57 crc kubenswrapper[4672]: I1007 15:45:57.584211 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196" exitCode=0 Oct 07 15:45:57 crc kubenswrapper[4672]: I1007 15:45:57.584281 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196"} Oct 07 15:45:57 crc kubenswrapper[4672]: I1007 15:45:57.584804 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16"} Oct 07 15:45:57 crc kubenswrapper[4672]: I1007 15:45:57.584837 4672 scope.go:117] "RemoveContainer" containerID="6074eb3764934a2d46262257c3843e1b6bd1a8954cd6d333fc8df0e30d91e7ff" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.789409 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:10 crc kubenswrapper[4672]: E1007 15:46:10.790692 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799a9b3a-f48e-44bc-b6bf-9f7723217822" containerName="collect-profiles" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.790777 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="799a9b3a-f48e-44bc-b6bf-9f7723217822" containerName="collect-profiles" Oct 07 15:46:10 crc kubenswrapper[4672]: E1007 15:46:10.790793 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="registry-server" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.790799 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="registry-server" Oct 07 15:46:10 crc kubenswrapper[4672]: E1007 15:46:10.790810 4672 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="extract-utilities" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.790818 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="extract-utilities" Oct 07 15:46:10 crc kubenswrapper[4672]: E1007 15:46:10.790830 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="extract-content" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.790835 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="extract-content" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.791107 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="799a9b3a-f48e-44bc-b6bf-9f7723217822" containerName="collect-profiles" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.791150 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="24a8e927-1908-46bd-8f03-25e2e63cc855" containerName="registry-server" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.792656 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.809292 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.891308 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp9t4\" (UniqueName: \"kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.891467 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.891505 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.993618 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp9t4\" (UniqueName: \"kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.993700 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.993727 4672 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.994391 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:10 crc kubenswrapper[4672]: I1007 15:46:10.994994 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.015915 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp9t4\" (UniqueName: \"kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4\") pod \"redhat-marketplace-qls5z\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.118460 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.563179 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.728394 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerStarted","Data":"f812a0e69cbbea8ff139e9cbb968f51719c33d0cdeda9163fd2c68d8e7aa5320"} Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.775038 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.777422 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.798554 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.913880 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.913928 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k254v\" (UniqueName: \"kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:11 crc kubenswrapper[4672]: I1007 15:46:11.914008 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.016516 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.016581 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k254v\" (UniqueName: \"kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.016622 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.017747 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.017967 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.036623 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k254v\" (UniqueName: \"kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v\") pod \"certified-operators-5txpt\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.152291 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.632079 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.750239 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerStarted","Data":"7bc01350767a8942c7ada93d2f4b580a3f107980c0dc58d15b788f6ea9d300b6"} Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.753168 4672 generic.go:334] "Generic (PLEG): container finished" podID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerID="774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97" exitCode=0 Oct 07 15:46:12 crc kubenswrapper[4672]: I1007 15:46:12.753217 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerDied","Data":"774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97"} Oct 07 15:46:13 crc kubenswrapper[4672]: I1007 15:46:13.771206 4672 generic.go:334] "Generic (PLEG): container finished" podID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerID="e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1" exitCode=0 Oct 07 15:46:13 crc kubenswrapper[4672]: I1007 15:46:13.771258 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerDied","Data":"e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1"} Oct 07 15:46:14 crc kubenswrapper[4672]: I1007 15:46:14.803973 4672 generic.go:334] "Generic (PLEG): container finished" podID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerID="f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76" exitCode=0 Oct 07 15:46:14 crc kubenswrapper[4672]: I1007 15:46:14.804387 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerDied","Data":"f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76"} Oct 07 15:46:15 crc kubenswrapper[4672]: I1007 15:46:15.815498 4672 generic.go:334] "Generic (PLEG): container finished" podID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerID="3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1" exitCode=0 Oct 07 15:46:15 crc kubenswrapper[4672]: I1007 15:46:15.815595 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerDied","Data":"3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1"} Oct 07 15:46:15 crc kubenswrapper[4672]: I1007 15:46:15.819655 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" 
event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerStarted","Data":"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9"} Oct 07 15:46:15 crc kubenswrapper[4672]: I1007 15:46:15.870394 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qls5z" podStartSLOduration=3.363424296 podStartE2EDuration="5.87037612s" podCreationTimestamp="2025-10-07 15:46:10 +0000 UTC" firstStartedPulling="2025-10-07 15:46:12.759158935 +0000 UTC m=+3449.734337516" lastFinishedPulling="2025-10-07 15:46:15.266110759 +0000 UTC m=+3452.241289340" observedRunningTime="2025-10-07 15:46:15.862381429 +0000 UTC m=+3452.837560010" watchObservedRunningTime="2025-10-07 15:46:15.87037612 +0000 UTC m=+3452.845554701" Oct 07 15:46:16 crc kubenswrapper[4672]: I1007 15:46:16.831681 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerStarted","Data":"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f"} Oct 07 15:46:16 crc kubenswrapper[4672]: I1007 15:46:16.856982 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5txpt" podStartSLOduration=3.150781364 podStartE2EDuration="5.856967177s" podCreationTimestamp="2025-10-07 15:46:11 +0000 UTC" firstStartedPulling="2025-10-07 15:46:13.77266288 +0000 UTC m=+3450.747841461" lastFinishedPulling="2025-10-07 15:46:16.478848693 +0000 UTC m=+3453.454027274" observedRunningTime="2025-10-07 15:46:16.852682813 +0000 UTC m=+3453.827861394" watchObservedRunningTime="2025-10-07 15:46:16.856967177 +0000 UTC m=+3453.832145748" Oct 07 15:46:21 crc kubenswrapper[4672]: I1007 15:46:21.119451 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:21 crc kubenswrapper[4672]: I1007 15:46:21.120227 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:21 crc kubenswrapper[4672]: I1007 15:46:21.166432 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:21 crc kubenswrapper[4672]: I1007 15:46:21.917389 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:22 crc kubenswrapper[4672]: I1007 15:46:22.153206 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:22 crc kubenswrapper[4672]: I1007 15:46:22.153259 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:22 crc kubenswrapper[4672]: I1007 15:46:22.202646 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:22 crc kubenswrapper[4672]: I1007 15:46:22.925305 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:27 crc kubenswrapper[4672]: I1007 15:46:27.983106 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:27 crc kubenswrapper[4672]: I1007 15:46:27.983943 4672 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/redhat-marketplace-qls5z" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="registry-server" containerID="cri-o://f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9" gracePeriod=2 Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.171763 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.172072 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5txpt" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="registry-server" containerID="cri-o://60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f" gracePeriod=2 Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.504195 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.644780 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.650156 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp9t4\" (UniqueName: \"kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4\") pod \"bb72d7ca-6949-497a-9cbc-b1179df12691\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.650241 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities\") pod \"bb72d7ca-6949-497a-9cbc-b1179df12691\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.650449 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content\") pod \"bb72d7ca-6949-497a-9cbc-b1179df12691\" (UID: \"bb72d7ca-6949-497a-9cbc-b1179df12691\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.651176 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities" (OuterVolumeSpecName: "utilities") pod "bb72d7ca-6949-497a-9cbc-b1179df12691" (UID: "bb72d7ca-6949-497a-9cbc-b1179df12691"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.656149 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4" (OuterVolumeSpecName: "kube-api-access-zp9t4") pod "bb72d7ca-6949-497a-9cbc-b1179df12691" (UID: "bb72d7ca-6949-497a-9cbc-b1179df12691"). InnerVolumeSpecName "kube-api-access-zp9t4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.667037 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb72d7ca-6949-497a-9cbc-b1179df12691" (UID: "bb72d7ca-6949-497a-9cbc-b1179df12691"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.752793 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities\") pod \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.752865 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content\") pod \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.753093 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k254v\" (UniqueName: \"kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v\") pod \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\" (UID: \"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a\") " Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.754315 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities" (OuterVolumeSpecName: "utilities") pod "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" (UID: "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.754851 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp9t4\" (UniqueName: \"kubernetes.io/projected/bb72d7ca-6949-497a-9cbc-b1179df12691-kube-api-access-zp9t4\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.754872 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.754887 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.754899 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb72d7ca-6949-497a-9cbc-b1179df12691-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.757519 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v" (OuterVolumeSpecName: "kube-api-access-k254v") pod "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" (UID: "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a"). InnerVolumeSpecName "kube-api-access-k254v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.799613 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" (UID: "4c84c905-f0ab-49ab-9dd7-147bf7b6e67a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.857409 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.857481 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k254v\" (UniqueName: \"kubernetes.io/projected/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a-kube-api-access-k254v\") on node \"crc\" DevicePath \"\"" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.937911 4672 generic.go:334] "Generic (PLEG): container finished" podID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerID="60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f" exitCode=0 Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.937962 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerDied","Data":"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f"} Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.938009 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5txpt" event={"ID":"4c84c905-f0ab-49ab-9dd7-147bf7b6e67a","Type":"ContainerDied","Data":"7bc01350767a8942c7ada93d2f4b580a3f107980c0dc58d15b788f6ea9d300b6"} Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.938041 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5txpt" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.938063 4672 scope.go:117] "RemoveContainer" containerID="60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.941725 4672 generic.go:334] "Generic (PLEG): container finished" podID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerID="f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9" exitCode=0 Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.941798 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qls5z" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.941934 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerDied","Data":"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9"} Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.942095 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qls5z" event={"ID":"bb72d7ca-6949-497a-9cbc-b1179df12691","Type":"ContainerDied","Data":"f812a0e69cbbea8ff139e9cbb968f51719c33d0cdeda9163fd2c68d8e7aa5320"} Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.962742 4672 scope.go:117] "RemoveContainer" containerID="3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.988570 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.993887 4672 scope.go:117] "RemoveContainer" containerID="e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1" Oct 07 15:46:28 crc kubenswrapper[4672]: I1007 15:46:28.997679 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qls5z"] Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.008734 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.026707 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5txpt"] Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.027854 4672 scope.go:117] "RemoveContainer" containerID="60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.028358 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f\": container with ID starting with 60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f not found: ID does not exist" containerID="60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.028401 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f"} err="failed to get container status \"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f\": rpc error: code = NotFound desc = could not find container \"60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f\": container with ID starting with 60297f09b53f19e2188656c3f8cd58b8ba31c22f0155b881d065ab5e1194979f not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.028431 4672 scope.go:117] "RemoveContainer" containerID="3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.028707 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1\": container with ID starting with 3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1 not found: ID 
does not exist" containerID="3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.028742 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1"} err="failed to get container status \"3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1\": rpc error: code = NotFound desc = could not find container \"3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1\": container with ID starting with 3ef0dbd1da1a7ff61e5dd6bf505ec01418f45392bfe4eeda65e1c4152da2aca1 not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.028771 4672 scope.go:117] "RemoveContainer" containerID="e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.029126 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1\": container with ID starting with e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1 not found: ID does not exist" containerID="e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.029151 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1"} err="failed to get container status \"e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1\": rpc error: code = NotFound desc = could not find container \"e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1\": container with ID starting with e27fa75668b7dacb0e6bf3aeaa31d4a312eb7138749b7d9bb17eb44a63c107d1 not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.029167 4672 scope.go:117] "RemoveContainer" containerID="f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.061307 4672 scope.go:117] "RemoveContainer" containerID="f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.096389 4672 scope.go:117] "RemoveContainer" containerID="774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.161811 4672 scope.go:117] "RemoveContainer" containerID="f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.162511 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9\": container with ID starting with f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9 not found: ID does not exist" containerID="f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.162575 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9"} err="failed to get container status \"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9\": rpc error: code = NotFound desc = could not find container 
\"f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9\": container with ID starting with f7b10e2f3273251061b93ac0f21f13db0fe6f08305ab8a853410bc8ef27b6af9 not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.162619 4672 scope.go:117] "RemoveContainer" containerID="f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.163702 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76\": container with ID starting with f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76 not found: ID does not exist" containerID="f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.163750 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76"} err="failed to get container status \"f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76\": rpc error: code = NotFound desc = could not find container \"f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76\": container with ID starting with f954a95f6be0c4fd1139bf6df7fe8bb3829ffbf2a73c02dcf8047a3d0b30ba76 not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.163790 4672 scope.go:117] "RemoveContainer" containerID="774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97" Oct 07 15:46:29 crc kubenswrapper[4672]: E1007 15:46:29.164194 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97\": container with ID starting with 774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97 not found: ID does not exist" containerID="774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.164220 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97"} err="failed to get container status \"774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97\": rpc error: code = NotFound desc = could not find container \"774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97\": container with ID starting with 774b385724e75a96e956f15fb43f3a380cd33f9edd8105e07e08d655bfc76a97 not found: ID does not exist" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.904454 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" path="/var/lib/kubelet/pods/4c84c905-f0ab-49ab-9dd7-147bf7b6e67a/volumes" Oct 07 15:46:29 crc kubenswrapper[4672]: I1007 15:46:29.905433 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" path="/var/lib/kubelet/pods/bb72d7ca-6949-497a-9cbc-b1179df12691/volumes" Oct 07 15:47:22 crc kubenswrapper[4672]: I1007 15:47:22.452592 4672 generic.go:334] "Generic (PLEG): container finished" podID="5104b964-0b2d-4b1f-920b-a0b3016ed885" containerID="86c11bf094befdad0a24f39255687c210bcf6e09a383198b9d27c13e46b3f937" exitCode=0 Oct 07 15:47:22 crc kubenswrapper[4672]: I1007 15:47:22.452796 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/tempest-tests-tempest" event={"ID":"5104b964-0b2d-4b1f-920b-a0b3016ed885","Type":"ContainerDied","Data":"86c11bf094befdad0a24f39255687c210bcf6e09a383198b9d27c13e46b3f937"} Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.818507 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.947920 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948012 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948050 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948081 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948123 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948154 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948222 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948253 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: \"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948271 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v89n\" (UniqueName: \"kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n\") pod \"5104b964-0b2d-4b1f-920b-a0b3016ed885\" (UID: 
\"5104b964-0b2d-4b1f-920b-a0b3016ed885\") " Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.948825 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data" (OuterVolumeSpecName: "config-data") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.949434 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.953955 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.954577 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.954718 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n" (OuterVolumeSpecName: "kube-api-access-7v89n") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "kube-api-access-7v89n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.978528 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.981516 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:47:23 crc kubenswrapper[4672]: I1007 15:47:23.982752 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.007582 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5104b964-0b2d-4b1f-920b-a0b3016ed885" (UID: "5104b964-0b2d-4b1f-920b-a0b3016ed885"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.050957 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.050993 4672 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051005 4672 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051019 4672 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5104b964-0b2d-4b1f-920b-a0b3016ed885-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051043 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v89n\" (UniqueName: \"kubernetes.io/projected/5104b964-0b2d-4b1f-920b-a0b3016ed885-kube-api-access-7v89n\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051068 4672 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051079 4672 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5104b964-0b2d-4b1f-920b-a0b3016ed885-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051089 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.051100 4672 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5104b964-0b2d-4b1f-920b-a0b3016ed885-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.071385 4672 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.152564 4672 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.470676 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5104b964-0b2d-4b1f-920b-a0b3016ed885","Type":"ContainerDied","Data":"d5bc22cbf94a0c399815c09c41f42133268d75bc9a33c4849b1374af735b17f5"} Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.470724 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5bc22cbf94a0c399815c09c41f42133268d75bc9a33c4849b1374af735b17f5" Oct 07 15:47:24 crc kubenswrapper[4672]: I1007 15:47:24.470752 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.030206 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031396 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="extract-utilities" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031414 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="extract-utilities" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031430 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885" containerName="tempest-tests-tempest-tests-runner" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031440 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885" containerName="tempest-tests-tempest-tests-runner" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031450 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="extract-content" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031458 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="extract-content" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031486 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="extract-content" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031492 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="extract-content" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031504 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031511 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031528 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031534 4672 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: E1007 15:47:29.031550 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="extract-utilities" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.031557 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="extract-utilities" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.032292 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c84c905-f0ab-49ab-9dd7-147bf7b6e67a" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.032318 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="5104b964-0b2d-4b1f-920b-a0b3016ed885" containerName="tempest-tests-tempest-tests-runner" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.032332 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb72d7ca-6949-497a-9cbc-b1179df12691" containerName="registry-server" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.033404 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.035460 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-v6mp7" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.050808 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.170324 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q6mz\" (UniqueName: \"kubernetes.io/projected/91f7fb17-f404-4b7b-9e67-4b908e341901-kube-api-access-2q6mz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.170837 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.273296 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.273381 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q6mz\" (UniqueName: \"kubernetes.io/projected/91f7fb17-f404-4b7b-9e67-4b908e341901-kube-api-access-2q6mz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.274171 4672 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.305188 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q6mz\" (UniqueName: \"kubernetes.io/projected/91f7fb17-f404-4b7b-9e67-4b908e341901-kube-api-access-2q6mz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.323348 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"91f7fb17-f404-4b7b-9e67-4b908e341901\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.365374 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 15:47:29 crc kubenswrapper[4672]: I1007 15:47:29.803820 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 15:47:29 crc kubenswrapper[4672]: W1007 15:47:29.810160 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91f7fb17_f404_4b7b_9e67_4b908e341901.slice/crio-8dd18ed91bacd4621f12350b1911807c379753c83ed293f8bbdf6ba8134a866c WatchSource:0}: Error finding container 8dd18ed91bacd4621f12350b1911807c379753c83ed293f8bbdf6ba8134a866c: Status 404 returned error can't find the container with id 8dd18ed91bacd4621f12350b1911807c379753c83ed293f8bbdf6ba8134a866c Oct 07 15:47:30 crc kubenswrapper[4672]: I1007 15:47:30.533878 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"91f7fb17-f404-4b7b-9e67-4b908e341901","Type":"ContainerStarted","Data":"8dd18ed91bacd4621f12350b1911807c379753c83ed293f8bbdf6ba8134a866c"} Oct 07 15:47:31 crc kubenswrapper[4672]: I1007 15:47:31.546956 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"91f7fb17-f404-4b7b-9e67-4b908e341901","Type":"ContainerStarted","Data":"0f861b5430f327e7b853eb163060bb3962da97e8ca3256d2a9e3e0f4d21129b3"} Oct 07 15:47:31 crc kubenswrapper[4672]: I1007 15:47:31.567780 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.795380446 podStartE2EDuration="2.567747503s" podCreationTimestamp="2025-10-07 15:47:29 +0000 UTC" firstStartedPulling="2025-10-07 15:47:29.813682981 +0000 UTC m=+3526.788861562" lastFinishedPulling="2025-10-07 15:47:30.586050038 +0000 UTC m=+3527.561228619" observedRunningTime="2025-10-07 15:47:31.560855734 +0000 UTC m=+3528.536034345" watchObservedRunningTime="2025-10-07 15:47:31.567747503 +0000 UTC m=+3528.542926124" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.404567 4672 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-must-gather-krr2z/must-gather-lmjv9"] Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.412764 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.415972 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-krr2z"/"openshift-service-ca.crt" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.416442 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-krr2z"/"kube-root-ca.crt" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.417334 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-krr2z"/"default-dockercfg-m5vxv" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.422806 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-krr2z/must-gather-lmjv9"] Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.577389 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.577459 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlr67\" (UniqueName: \"kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.679114 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.679175 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlr67\" (UniqueName: \"kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.679720 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.700477 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlr67\" (UniqueName: \"kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67\") pod \"must-gather-lmjv9\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:48 crc kubenswrapper[4672]: I1007 15:47:48.736765 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:47:49 crc kubenswrapper[4672]: I1007 15:47:49.285633 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-krr2z/must-gather-lmjv9"] Oct 07 15:47:49 crc kubenswrapper[4672]: W1007 15:47:49.301265 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba91ba6b_8fe5_4323_869c_9e2d428d7481.slice/crio-5e7e0cbea343af1b572479304037c731da917bbe332e89d8cf36b1ebb4958d41 WatchSource:0}: Error finding container 5e7e0cbea343af1b572479304037c731da917bbe332e89d8cf36b1ebb4958d41: Status 404 returned error can't find the container with id 5e7e0cbea343af1b572479304037c731da917bbe332e89d8cf36b1ebb4958d41 Oct 07 15:47:49 crc kubenswrapper[4672]: I1007 15:47:49.729728 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/must-gather-lmjv9" event={"ID":"ba91ba6b-8fe5-4323-869c-9e2d428d7481","Type":"ContainerStarted","Data":"5e7e0cbea343af1b572479304037c731da917bbe332e89d8cf36b1ebb4958d41"} Oct 07 15:47:53 crc kubenswrapper[4672]: I1007 15:47:53.786123 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/must-gather-lmjv9" event={"ID":"ba91ba6b-8fe5-4323-869c-9e2d428d7481","Type":"ContainerStarted","Data":"3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06"} Oct 07 15:47:53 crc kubenswrapper[4672]: I1007 15:47:53.786614 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/must-gather-lmjv9" event={"ID":"ba91ba6b-8fe5-4323-869c-9e2d428d7481","Type":"ContainerStarted","Data":"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec"} Oct 07 15:47:53 crc kubenswrapper[4672]: I1007 15:47:53.808615 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-krr2z/must-gather-lmjv9" podStartSLOduration=2.104756422 podStartE2EDuration="5.808594452s" podCreationTimestamp="2025-10-07 15:47:48 +0000 UTC" firstStartedPulling="2025-10-07 15:47:49.306786006 +0000 UTC m=+3546.281964587" lastFinishedPulling="2025-10-07 15:47:53.010624036 +0000 UTC m=+3549.985802617" observedRunningTime="2025-10-07 15:47:53.805147183 +0000 UTC m=+3550.780325764" watchObservedRunningTime="2025-10-07 15:47:53.808594452 +0000 UTC m=+3550.783773043" Oct 07 15:47:56 crc kubenswrapper[4672]: E1007 15:47:56.172330 4672 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.122:36760->38.129.56.122:34883: write tcp 38.129.56.122:36760->38.129.56.122:34883: write: broken pipe Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.650288 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.650342 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.813001 4672 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-krr2z/crc-debug-25r79"] Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.815162 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.952450 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:56 crc kubenswrapper[4672]: I1007 15:47:56.952766 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zmt8\" (UniqueName: \"kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.054911 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.054964 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zmt8\" (UniqueName: \"kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.055109 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.076245 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zmt8\" (UniqueName: \"kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8\") pod \"crc-debug-25r79\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.141247 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:47:57 crc kubenswrapper[4672]: I1007 15:47:57.828689 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-25r79" event={"ID":"84a44288-fa23-4b46-b598-cd04581b7329","Type":"ContainerStarted","Data":"16abcf58d87587b3f7c569540ac17f75357b62b4df531c817b8d6cfc24fda20d"} Oct 07 15:48:09 crc kubenswrapper[4672]: I1007 15:48:09.948843 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-25r79" event={"ID":"84a44288-fa23-4b46-b598-cd04581b7329","Type":"ContainerStarted","Data":"04c66dc0d2773c8a474855854307f4ffe2ce3d84877d390af5bf2cbde50ed1df"} Oct 07 15:48:09 crc kubenswrapper[4672]: I1007 15:48:09.964903 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-krr2z/crc-debug-25r79" podStartSLOduration=1.9937715539999998 podStartE2EDuration="13.964880214s" podCreationTimestamp="2025-10-07 15:47:56 +0000 UTC" firstStartedPulling="2025-10-07 15:47:57.180106982 +0000 UTC m=+3554.155285563" lastFinishedPulling="2025-10-07 15:48:09.151215642 +0000 UTC m=+3566.126394223" observedRunningTime="2025-10-07 15:48:09.960545528 +0000 UTC m=+3566.935724109" watchObservedRunningTime="2025-10-07 15:48:09.964880214 +0000 UTC m=+3566.940058795" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.168611 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.174469 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.191656 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.271394 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.271518 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.271579 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smmcw\" (UniqueName: \"kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.374406 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc 
kubenswrapper[4672]: I1007 15:48:22.374617 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.374761 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smmcw\" (UniqueName: \"kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.375241 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.375504 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.401861 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smmcw\" (UniqueName: \"kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw\") pod \"community-operators-46ld8\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:22 crc kubenswrapper[4672]: I1007 15:48:22.509912 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:23 crc kubenswrapper[4672]: I1007 15:48:23.129179 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:48:24 crc kubenswrapper[4672]: I1007 15:48:24.086799 4672 generic.go:334] "Generic (PLEG): container finished" podID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerID="c3587cfc56574c000a002bd8932f47fe9a80ba208971f5f99e30430d6448add8" exitCode=0 Oct 07 15:48:24 crc kubenswrapper[4672]: I1007 15:48:24.086916 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerDied","Data":"c3587cfc56574c000a002bd8932f47fe9a80ba208971f5f99e30430d6448add8"} Oct 07 15:48:24 crc kubenswrapper[4672]: I1007 15:48:24.087317 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerStarted","Data":"7e15267f1893396d48ea0ba200a1b0fd084ed09cf3ef7608bab0e890e4bd4a31"} Oct 07 15:48:26 crc kubenswrapper[4672]: I1007 15:48:26.116330 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerStarted","Data":"46fc3e134555cd9adc6f0286b81317129f3145a142503cb3ec4a7b9338b3c21e"} Oct 07 15:48:26 crc kubenswrapper[4672]: I1007 15:48:26.651009 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:48:26 crc kubenswrapper[4672]: I1007 15:48:26.651098 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:48:29 crc kubenswrapper[4672]: I1007 15:48:29.148302 4672 generic.go:334] "Generic (PLEG): container finished" podID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerID="46fc3e134555cd9adc6f0286b81317129f3145a142503cb3ec4a7b9338b3c21e" exitCode=0 Oct 07 15:48:29 crc kubenswrapper[4672]: I1007 15:48:29.148438 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerDied","Data":"46fc3e134555cd9adc6f0286b81317129f3145a142503cb3ec4a7b9338b3c21e"} Oct 07 15:48:30 crc kubenswrapper[4672]: I1007 15:48:30.158062 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerStarted","Data":"94ed1e1d470d42194d3caa386d5b74b1c70d72719b6a63d217a1dd2cd992df57"} Oct 07 15:48:30 crc kubenswrapper[4672]: I1007 15:48:30.184406 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-46ld8" podStartSLOduration=2.430685782 podStartE2EDuration="8.184380314s" podCreationTimestamp="2025-10-07 15:48:22 +0000 UTC" firstStartedPulling="2025-10-07 15:48:24.089594199 +0000 UTC m=+3581.064772770" lastFinishedPulling="2025-10-07 
15:48:29.843288721 +0000 UTC m=+3586.818467302" observedRunningTime="2025-10-07 15:48:30.172976404 +0000 UTC m=+3587.148155005" watchObservedRunningTime="2025-10-07 15:48:30.184380314 +0000 UTC m=+3587.159558895" Oct 07 15:48:32 crc kubenswrapper[4672]: I1007 15:48:32.511450 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:32 crc kubenswrapper[4672]: I1007 15:48:32.512503 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:48:33 crc kubenswrapper[4672]: I1007 15:48:33.566753 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-46ld8" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" probeResult="failure" output=< Oct 07 15:48:33 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:48:33 crc kubenswrapper[4672]: > Oct 07 15:48:43 crc kubenswrapper[4672]: I1007 15:48:43.650177 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-46ld8" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" probeResult="failure" output=< Oct 07 15:48:43 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:48:43 crc kubenswrapper[4672]: > Oct 07 15:48:53 crc kubenswrapper[4672]: I1007 15:48:53.565671 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-46ld8" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" probeResult="failure" output=< Oct 07 15:48:53 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 15:48:53 crc kubenswrapper[4672]: > Oct 07 15:48:56 crc kubenswrapper[4672]: I1007 15:48:56.650307 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:48:56 crc kubenswrapper[4672]: I1007 15:48:56.650886 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:48:56 crc kubenswrapper[4672]: I1007 15:48:56.650940 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 15:48:56 crc kubenswrapper[4672]: I1007 15:48:56.651897 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 15:48:56 crc kubenswrapper[4672]: I1007 15:48:56.651951 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" 
containerID="cri-o://99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" gracePeriod=600 Oct 07 15:48:56 crc kubenswrapper[4672]: E1007 15:48:56.802723 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:48:57 crc kubenswrapper[4672]: I1007 15:48:57.425450 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" exitCode=0 Oct 07 15:48:57 crc kubenswrapper[4672]: I1007 15:48:57.425528 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16"} Oct 07 15:48:57 crc kubenswrapper[4672]: I1007 15:48:57.425599 4672 scope.go:117] "RemoveContainer" containerID="03cdb6f5990dbe334088174cddcb0a089754968450536b552fee6703a9376196" Oct 07 15:48:57 crc kubenswrapper[4672]: I1007 15:48:57.426609 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:48:57 crc kubenswrapper[4672]: E1007 15:48:57.426939 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.284352 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6d9d7bb546-5l2xz_a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4/barbican-api/0.log" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.352928 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6d9d7bb546-5l2xz_a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4/barbican-api-log/0.log" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.561223 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.573534 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f4cdcdbb-4hkrg_dd7a9076-c512-4e9c-b626-482122ee920b/barbican-keystone-listener/0.log" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.621697 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.728598 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f4cdcdbb-4hkrg_dd7a9076-c512-4e9c-b626-482122ee920b/barbican-keystone-listener-log/0.log" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.776594 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-fbf4c46f7-rxrcl_79138985-a31b-43fd-aec5-cfd8abf453d1/barbican-worker/0.log" Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.798340 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:49:02 crc kubenswrapper[4672]: I1007 15:49:02.927792 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-fbf4c46f7-rxrcl_79138985-a31b-43fd-aec5-cfd8abf453d1/barbican-worker-log/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.047520 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w_4a1d79b4-a176-48af-9c78-59c7ddd39b71/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.263250 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/ceilometer-central-agent/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.285260 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/ceilometer-notification-agent/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.449779 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/proxy-httpd/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.477219 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/sg-core/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.702702 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cac7576e-f243-46c2-90cb-e62d4c822d81/cinder-api-log/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.719936 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cac7576e-f243-46c2-90cb-e62d4c822d81/cinder-api/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.960715 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_133e6c33-5248-4463-9a3b-75431b468373/cinder-scheduler/0.log" Oct 07 15:49:03 crc kubenswrapper[4672]: I1007 15:49:03.971678 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_133e6c33-5248-4463-9a3b-75431b468373/probe/0.log" Oct 07 15:49:04 crc kubenswrapper[4672]: I1007 15:49:04.205375 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z_a9f7823a-974b-4ef1-9414-f1aac7bd2179/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:04 crc kubenswrapper[4672]: I1007 15:49:04.266619 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-df6lz_fa45d7c4-d068-4df1-847b-589c7061b6e1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:04 crc kubenswrapper[4672]: I1007 15:49:04.503302 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj_6d5fecb8-bc89-44d0-9413-4ab72d34390a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:04 crc kubenswrapper[4672]: I1007 15:49:04.512702 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-46ld8" 
podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" containerID="cri-o://94ed1e1d470d42194d3caa386d5b74b1c70d72719b6a63d217a1dd2cd992df57" gracePeriod=2 Oct 07 15:49:04 crc kubenswrapper[4672]: I1007 15:49:04.844761 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/init/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.203133 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/init/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.206193 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/dnsmasq-dns/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.528442 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-5txmn_fa845c6c-c027-483f-b5e8-404778f6a1d4/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.529139 4672 generic.go:334] "Generic (PLEG): container finished" podID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerID="94ed1e1d470d42194d3caa386d5b74b1c70d72719b6a63d217a1dd2cd992df57" exitCode=0 Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.529172 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerDied","Data":"94ed1e1d470d42194d3caa386d5b74b1c70d72719b6a63d217a1dd2cd992df57"} Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.530096 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ld8" event={"ID":"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55","Type":"ContainerDied","Data":"7e15267f1893396d48ea0ba200a1b0fd084ed09cf3ef7608bab0e890e4bd4a31"} Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.530178 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e15267f1893396d48ea0ba200a1b0fd084ed09cf3ef7608bab0e890e4bd4a31" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.563648 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e66032dd-633c-4fa7-b39c-714c4c799aed/glance-httpd/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.579004 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.655000 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content\") pod \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.655220 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities\") pod \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.655365 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smmcw\" (UniqueName: \"kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw\") pod \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\" (UID: \"9ed3ff0a-74e4-4535-9b15-8f7f9963ad55\") " Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.656285 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities" (OuterVolumeSpecName: "utilities") pod "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" (UID: "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.672480 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw" (OuterVolumeSpecName: "kube-api-access-smmcw") pod "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" (UID: "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55"). InnerVolumeSpecName "kube-api-access-smmcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.758369 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.758722 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smmcw\" (UniqueName: \"kubernetes.io/projected/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-kube-api-access-smmcw\") on node \"crc\" DevicePath \"\"" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.758790 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" (UID: "9ed3ff0a-74e4-4535-9b15-8f7f9963ad55"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.802710 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e66032dd-633c-4fa7-b39c-714c4c799aed/glance-log/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.824904 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_180e3a93-5be7-42c2-832c-86e29fb5444d/glance-httpd/0.log" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.860338 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:49:05 crc kubenswrapper[4672]: I1007 15:49:05.941850 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_180e3a93-5be7-42c2-832c-86e29fb5444d/glance-log/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.125049 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-694678548d-bbtxt_d3c1e726-5fce-4f95-952f-effb9a8993f3/horizon/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.363875 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj_be75457c-c14d-4827-850e-2619993cc1f6/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.404711 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-694678548d-bbtxt_d3c1e726-5fce-4f95-952f-effb9a8993f3/horizon-log/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.536650 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46ld8" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.548455 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-x9l79_3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.563958 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.576770 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-46ld8"] Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.826420 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5858475b48-6sdfp_718ff39a-552f-48a0-91f4-c9fa07c17b7d/keystone-api/0.log" Oct 07 15:49:06 crc kubenswrapper[4672]: I1007 15:49:06.880864 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_0eb79c67-08e6-4bfc-9b12-333500d26d9c/kube-state-metrics/0.log" Oct 07 15:49:07 crc kubenswrapper[4672]: I1007 15:49:07.066293 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-wstfd_0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:07 crc kubenswrapper[4672]: I1007 15:49:07.432792 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6c7bf59845-qwdkz_fdb59d6a-51ea-4daa-904b-54a9a1af23f7/neutron-httpd/0.log" Oct 07 15:49:07 crc kubenswrapper[4672]: I1007 15:49:07.471151 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6c7bf59845-qwdkz_fdb59d6a-51ea-4daa-904b-54a9a1af23f7/neutron-api/0.log" Oct 07 15:49:07 crc kubenswrapper[4672]: I1007 15:49:07.664361 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7_ebc0c220-98a0-4285-89ec-689749e5f16b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:07 crc kubenswrapper[4672]: I1007 15:49:07.907559 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" path="/var/lib/kubelet/pods/9ed3ff0a-74e4-4535-9b15-8f7f9963ad55/volumes" Oct 07 15:49:08 crc kubenswrapper[4672]: I1007 15:49:08.326123 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3/nova-api-log/0.log" Oct 07 15:49:08 crc kubenswrapper[4672]: I1007 15:49:08.530566 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_34190ed8-8a54-40ab-b9d4-85f48bc24ee9/nova-cell0-conductor-conductor/0.log" Oct 07 15:49:08 crc kubenswrapper[4672]: I1007 15:49:08.553830 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3/nova-api-api/0.log" Oct 07 15:49:08 crc kubenswrapper[4672]: I1007 15:49:08.938188 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_696b0eb7-d73f-4a68-a991-d9af3b74fea2/nova-cell1-novncproxy-novncproxy/0.log" Oct 07 15:49:08 crc kubenswrapper[4672]: I1007 15:49:08.947638 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_38be140b-8203-4f2a-8215-6c76db50244e/nova-cell1-conductor-conductor/0.log" Oct 07 15:49:09 crc 
kubenswrapper[4672]: I1007 15:49:09.220928 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-2s7mn_be41884e-d5f3-42e7-bd95-3c04629e26e3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:09 crc kubenswrapper[4672]: I1007 15:49:09.391095 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_050ad2ed-cc5a-41bb-a2f9-437deee02938/nova-metadata-log/0.log" Oct 07 15:49:09 crc kubenswrapper[4672]: I1007 15:49:09.862723 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_21b9d12a-9d4c-49c3-8e94-80d538f4853a/nova-scheduler-scheduler/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.038146 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/mysql-bootstrap/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.281274 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/mysql-bootstrap/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.341402 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/galera/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.615679 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/mysql-bootstrap/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.829679 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/mysql-bootstrap/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.893421 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:49:10 crc kubenswrapper[4672]: E1007 15:49:10.893764 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.923430 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/galera/0.log" Oct 07 15:49:10 crc kubenswrapper[4672]: I1007 15:49:10.937916 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_050ad2ed-cc5a-41bb-a2f9-437deee02938/nova-metadata-metadata/0.log" Oct 07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.267498 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_452e785c-16f5-478c-8c52-638692cd3abd/openstackclient/0.log" Oct 07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.343458 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-djrzm_76c480b3-ae62-4b55-b055-d0c1c0ff5777/openstack-network-exporter/0.log" Oct 07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.568347 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server-init/0.log" Oct 
07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.722370 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server-init/0.log" Oct 07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.812144 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovs-vswitchd/0.log" Oct 07 15:49:11 crc kubenswrapper[4672]: I1007 15:49:11.826553 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.031623 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-tzsfl_0e4d1227-361f-4c04-b0ce-12295f021364/ovn-controller/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.286181 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-lngw9_04f4d7c8-1cca-4233-9cc5-dfa205f89c49/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.393886 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b2cc7fd-952a-4115-a4e4-408bfc75a54d/openstack-network-exporter/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.538961 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b2cc7fd-952a-4115-a4e4-408bfc75a54d/ovn-northd/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.628529 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6a4f76b5-fd91-462b-9e85-9c66d83ab353/openstack-network-exporter/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.788517 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6a4f76b5-fd91-462b-9e85-9c66d83ab353/ovsdbserver-nb/0.log" Oct 07 15:49:12 crc kubenswrapper[4672]: I1007 15:49:12.888306 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_16ba9419-35b8-45df-bf64-86d144d67284/openstack-network-exporter/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.008777 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_16ba9419-35b8-45df-bf64-86d144d67284/ovsdbserver-sb/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.199873 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-78768fbc98-wqtlt_8ee6c49e-c514-4798-ab81-0bc31c59048d/placement-api/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.346337 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-78768fbc98-wqtlt_8ee6c49e-c514-4798-ab81-0bc31c59048d/placement-log/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.407854 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/setup-container/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.713642 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/setup-container/0.log" Oct 07 15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.731207 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/rabbitmq/0.log" Oct 07 
15:49:13 crc kubenswrapper[4672]: I1007 15:49:13.945929 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/setup-container/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.113903 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/setup-container/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.136091 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/rabbitmq/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.392030 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f_c1ded63b-448b-4e94-9c53-e87268f775ac/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.503783 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lrt9s_1861ff34-6944-4cea-950f-8efc95e05f1a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.695970 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w_fa79d159-5cc6-405e-8725-d3f49e9f75f8/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:14 crc kubenswrapper[4672]: I1007 15:49:14.898420 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-mr4zp_414dfaec-a340-4080-b3e7-da4966078c60/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.036651 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-mslxh_b69369f3-e53e-4aea-ab35-dcc1e9f3d56f/ssh-known-hosts-edpm-deployment/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.256297 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d96d8478f-sx526_e7406b25-bb39-409b-bde4-75cc32bf4ae2/proxy-server/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.313584 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d96d8478f-sx526_e7406b25-bb39-409b-bde4-75cc32bf4ae2/proxy-httpd/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.473261 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-r4b6l_a57e275b-ff63-4284-aae2-7fbc858c0128/swift-ring-rebalance/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.677727 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-auditor/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.709321 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-reaper/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.815489 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-replicator/0.log" Oct 07 15:49:15 crc kubenswrapper[4672]: I1007 15:49:15.897117 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-server/0.log" Oct 07 15:49:15 crc 
kubenswrapper[4672]: I1007 15:49:15.958603 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-auditor/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.114442 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-replicator/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.135782 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-server/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.224939 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-updater/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.374833 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-auditor/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.445053 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-expirer/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.487647 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-replicator/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.636128 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-server/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.653050 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-updater/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.724292 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/rsync/0.log" Oct 07 15:49:16 crc kubenswrapper[4672]: I1007 15:49:16.881576 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/swift-recon-cron/0.log" Oct 07 15:49:17 crc kubenswrapper[4672]: I1007 15:49:17.033317 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-jktl2_a7d08d2c-7a70-47f3-afa0-f93d34efd7dc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:17 crc kubenswrapper[4672]: I1007 15:49:17.178268 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5104b964-0b2d-4b1f-920b-a0b3016ed885/tempest-tests-tempest-tests-runner/0.log" Oct 07 15:49:17 crc kubenswrapper[4672]: I1007 15:49:17.337451 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_91f7fb17-f404-4b7b-9e67-4b908e341901/test-operator-logs-container/0.log" Oct 07 15:49:17 crc kubenswrapper[4672]: I1007 15:49:17.790889 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv_6b18bc85-3a86-4989-a2b2-dd34d127023a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:49:21 crc kubenswrapper[4672]: I1007 15:49:21.894172 4672 scope.go:117] "RemoveContainer" 
containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:49:21 crc kubenswrapper[4672]: E1007 15:49:21.895302 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:49:24 crc kubenswrapper[4672]: I1007 15:49:24.618571 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_09d93eef-fed4-4023-80a4-8927b2631580/memcached/0.log" Oct 07 15:49:35 crc kubenswrapper[4672]: I1007 15:49:35.892596 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:49:35 crc kubenswrapper[4672]: E1007 15:49:35.893926 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:49:48 crc kubenswrapper[4672]: I1007 15:49:48.892536 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:49:48 crc kubenswrapper[4672]: E1007 15:49:48.893316 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:02 crc kubenswrapper[4672]: I1007 15:50:02.892747 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:50:02 crc kubenswrapper[4672]: E1007 15:50:02.894177 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:08 crc kubenswrapper[4672]: I1007 15:50:08.205299 4672 generic.go:334] "Generic (PLEG): container finished" podID="84a44288-fa23-4b46-b598-cd04581b7329" containerID="04c66dc0d2773c8a474855854307f4ffe2ce3d84877d390af5bf2cbde50ed1df" exitCode=0 Oct 07 15:50:08 crc kubenswrapper[4672]: I1007 15:50:08.205390 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-25r79" event={"ID":"84a44288-fa23-4b46-b598-cd04581b7329","Type":"ContainerDied","Data":"04c66dc0d2773c8a474855854307f4ffe2ce3d84877d390af5bf2cbde50ed1df"} Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.341708 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.380104 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-25r79"] Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.388199 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-25r79"] Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.434705 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zmt8\" (UniqueName: \"kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8\") pod \"84a44288-fa23-4b46-b598-cd04581b7329\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.434873 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host\") pod \"84a44288-fa23-4b46-b598-cd04581b7329\" (UID: \"84a44288-fa23-4b46-b598-cd04581b7329\") " Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.435279 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host" (OuterVolumeSpecName: "host") pod "84a44288-fa23-4b46-b598-cd04581b7329" (UID: "84a44288-fa23-4b46-b598-cd04581b7329"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.436063 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a44288-fa23-4b46-b598-cd04581b7329-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.441220 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8" (OuterVolumeSpecName: "kube-api-access-2zmt8") pod "84a44288-fa23-4b46-b598-cd04581b7329" (UID: "84a44288-fa23-4b46-b598-cd04581b7329"). InnerVolumeSpecName "kube-api-access-2zmt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.537865 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zmt8\" (UniqueName: \"kubernetes.io/projected/84a44288-fa23-4b46-b598-cd04581b7329-kube-api-access-2zmt8\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:09 crc kubenswrapper[4672]: I1007 15:50:09.902066 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a44288-fa23-4b46-b598-cd04581b7329" path="/var/lib/kubelet/pods/84a44288-fa23-4b46-b598-cd04581b7329/volumes" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.237983 4672 scope.go:117] "RemoveContainer" containerID="04c66dc0d2773c8a474855854307f4ffe2ce3d84877d390af5bf2cbde50ed1df" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.238045 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-25r79" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.573040 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-krr2z/crc-debug-zcxc9"] Oct 07 15:50:10 crc kubenswrapper[4672]: E1007 15:50:10.574115 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a44288-fa23-4b46-b598-cd04581b7329" containerName="container-00" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574135 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a44288-fa23-4b46-b598-cd04581b7329" containerName="container-00" Oct 07 15:50:10 crc kubenswrapper[4672]: E1007 15:50:10.574157 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="extract-utilities" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574163 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="extract-utilities" Oct 07 15:50:10 crc kubenswrapper[4672]: E1007 15:50:10.574174 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="extract-content" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574180 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="extract-content" Oct 07 15:50:10 crc kubenswrapper[4672]: E1007 15:50:10.574188 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574194 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574394 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a44288-fa23-4b46-b598-cd04581b7329" containerName="container-00" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.574423 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ed3ff0a-74e4-4535-9b15-8f7f9963ad55" containerName="registry-server" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.575223 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.661971 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.662371 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drx54\" (UniqueName: \"kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.765188 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drx54\" (UniqueName: \"kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.765373 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.765570 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.786852 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drx54\" (UniqueName: \"kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54\") pod \"crc-debug-zcxc9\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:10 crc kubenswrapper[4672]: I1007 15:50:10.895168 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:11 crc kubenswrapper[4672]: I1007 15:50:11.248829 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" event={"ID":"8ceb5397-926c-4cb1-a423-fadb1ad7b687","Type":"ContainerStarted","Data":"7fdaa3cbf9fc8b80eb2a567a2a7642e3e3a079a8c8759883c3e3fd895ce2e7b6"} Oct 07 15:50:11 crc kubenswrapper[4672]: I1007 15:50:11.249186 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" event={"ID":"8ceb5397-926c-4cb1-a423-fadb1ad7b687","Type":"ContainerStarted","Data":"5589efb2882628676d493fdd641aeb65e24bc2cbc379409355eed74b9f68f392"} Oct 07 15:50:11 crc kubenswrapper[4672]: I1007 15:50:11.267935 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" podStartSLOduration=1.267914381 podStartE2EDuration="1.267914381s" podCreationTimestamp="2025-10-07 15:50:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:50:11.260580829 +0000 UTC m=+3688.235759430" watchObservedRunningTime="2025-10-07 15:50:11.267914381 +0000 UTC m=+3688.243092962" Oct 07 15:50:12 crc kubenswrapper[4672]: I1007 15:50:12.258815 4672 generic.go:334] "Generic (PLEG): container finished" podID="8ceb5397-926c-4cb1-a423-fadb1ad7b687" containerID="7fdaa3cbf9fc8b80eb2a567a2a7642e3e3a079a8c8759883c3e3fd895ce2e7b6" exitCode=0 Oct 07 15:50:12 crc kubenswrapper[4672]: I1007 15:50:12.261251 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" event={"ID":"8ceb5397-926c-4cb1-a423-fadb1ad7b687","Type":"ContainerDied","Data":"7fdaa3cbf9fc8b80eb2a567a2a7642e3e3a079a8c8759883c3e3fd895ce2e7b6"} Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.377593 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.518122 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drx54\" (UniqueName: \"kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54\") pod \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.518268 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host\") pod \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\" (UID: \"8ceb5397-926c-4cb1-a423-fadb1ad7b687\") " Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.518649 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host" (OuterVolumeSpecName: "host") pod "8ceb5397-926c-4cb1-a423-fadb1ad7b687" (UID: "8ceb5397-926c-4cb1-a423-fadb1ad7b687"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.519321 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ceb5397-926c-4cb1-a423-fadb1ad7b687-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.525550 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54" (OuterVolumeSpecName: "kube-api-access-drx54") pod "8ceb5397-926c-4cb1-a423-fadb1ad7b687" (UID: "8ceb5397-926c-4cb1-a423-fadb1ad7b687"). InnerVolumeSpecName "kube-api-access-drx54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:50:13 crc kubenswrapper[4672]: I1007 15:50:13.620496 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drx54\" (UniqueName: \"kubernetes.io/projected/8ceb5397-926c-4cb1-a423-fadb1ad7b687-kube-api-access-drx54\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:14 crc kubenswrapper[4672]: I1007 15:50:14.285049 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" event={"ID":"8ceb5397-926c-4cb1-a423-fadb1ad7b687","Type":"ContainerDied","Data":"5589efb2882628676d493fdd641aeb65e24bc2cbc379409355eed74b9f68f392"} Oct 07 15:50:14 crc kubenswrapper[4672]: I1007 15:50:14.285106 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5589efb2882628676d493fdd641aeb65e24bc2cbc379409355eed74b9f68f392" Oct 07 15:50:14 crc kubenswrapper[4672]: I1007 15:50:14.285123 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-zcxc9" Oct 07 15:50:17 crc kubenswrapper[4672]: I1007 15:50:17.892342 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:50:17 crc kubenswrapper[4672]: E1007 15:50:17.893704 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:18 crc kubenswrapper[4672]: I1007 15:50:18.098001 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-zcxc9"] Oct 07 15:50:18 crc kubenswrapper[4672]: I1007 15:50:18.107149 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-zcxc9"] Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.284162 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-krr2z/crc-debug-79pvg"] Oct 07 15:50:19 crc kubenswrapper[4672]: E1007 15:50:19.285675 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ceb5397-926c-4cb1-a423-fadb1ad7b687" containerName="container-00" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.285713 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ceb5397-926c-4cb1-a423-fadb1ad7b687" containerName="container-00" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.285935 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ceb5397-926c-4cb1-a423-fadb1ad7b687" containerName="container-00" Oct 07 
15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.286771 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.426558 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4vcr\" (UniqueName: \"kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.426628 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.529469 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4vcr\" (UniqueName: \"kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.529550 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.529760 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.550959 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4vcr\" (UniqueName: \"kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr\") pod \"crc-debug-79pvg\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.607566 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:19 crc kubenswrapper[4672]: I1007 15:50:19.905334 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ceb5397-926c-4cb1-a423-fadb1ad7b687" path="/var/lib/kubelet/pods/8ceb5397-926c-4cb1-a423-fadb1ad7b687/volumes" Oct 07 15:50:20 crc kubenswrapper[4672]: I1007 15:50:20.341819 4672 generic.go:334] "Generic (PLEG): container finished" podID="870d9f99-5388-4ff1-ae59-050ebe8801de" containerID="00d111480735c3ccd3bb695b4415a3d04ea55af84c1441f9071a72fb06b9eda4" exitCode=0 Oct 07 15:50:20 crc kubenswrapper[4672]: I1007 15:50:20.341881 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-79pvg" event={"ID":"870d9f99-5388-4ff1-ae59-050ebe8801de","Type":"ContainerDied","Data":"00d111480735c3ccd3bb695b4415a3d04ea55af84c1441f9071a72fb06b9eda4"} Oct 07 15:50:20 crc kubenswrapper[4672]: I1007 15:50:20.341919 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/crc-debug-79pvg" event={"ID":"870d9f99-5388-4ff1-ae59-050ebe8801de","Type":"ContainerStarted","Data":"af2fc25708a4be72402176e026af9d1de4b3db6fa6721a89f0b0450b0b8b2701"} Oct 07 15:50:20 crc kubenswrapper[4672]: I1007 15:50:20.405104 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-79pvg"] Oct 07 15:50:20 crc kubenswrapper[4672]: I1007 15:50:20.415637 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-krr2z/crc-debug-79pvg"] Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.497161 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.569491 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4vcr\" (UniqueName: \"kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr\") pod \"870d9f99-5388-4ff1-ae59-050ebe8801de\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.569624 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host\") pod \"870d9f99-5388-4ff1-ae59-050ebe8801de\" (UID: \"870d9f99-5388-4ff1-ae59-050ebe8801de\") " Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.570140 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host" (OuterVolumeSpecName: "host") pod "870d9f99-5388-4ff1-ae59-050ebe8801de" (UID: "870d9f99-5388-4ff1-ae59-050ebe8801de"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.570521 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/870d9f99-5388-4ff1-ae59-050ebe8801de-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.580606 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr" (OuterVolumeSpecName: "kube-api-access-s4vcr") pod "870d9f99-5388-4ff1-ae59-050ebe8801de" (UID: "870d9f99-5388-4ff1-ae59-050ebe8801de"). InnerVolumeSpecName "kube-api-access-s4vcr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.672947 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4vcr\" (UniqueName: \"kubernetes.io/projected/870d9f99-5388-4ff1-ae59-050ebe8801de-kube-api-access-s4vcr\") on node \"crc\" DevicePath \"\"" Oct 07 15:50:21 crc kubenswrapper[4672]: I1007 15:50:21.904130 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="870d9f99-5388-4ff1-ae59-050ebe8801de" path="/var/lib/kubelet/pods/870d9f99-5388-4ff1-ae59-050ebe8801de/volumes" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.040729 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.276571 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.299625 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.300217 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.367953 4672 scope.go:117] "RemoveContainer" containerID="00d111480735c3ccd3bb695b4415a3d04ea55af84c1441f9071a72fb06b9eda4" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.368135 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/crc-debug-79pvg" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.520270 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/extract/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.548338 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.563970 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.697430 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-hvp85_b5f8d795-a31d-4992-99fc-590848eae6fd/kube-rbac-proxy/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.825932 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-jnwxm_d427841d-eba8-45b1-aa18-de4a5d1fecaa/kube-rbac-proxy/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.843312 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-hvp85_b5f8d795-a31d-4992-99fc-590848eae6fd/manager/0.log" Oct 07 15:50:22 crc kubenswrapper[4672]: I1007 15:50:22.946339 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-jnwxm_d427841d-eba8-45b1-aa18-de4a5d1fecaa/manager/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.021557 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-l4zjl_fba2eb6a-4cb0-4fc9-9625-e7a57382e412/kube-rbac-proxy/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.078941 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-l4zjl_fba2eb6a-4cb0-4fc9-9625-e7a57382e412/manager/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.265733 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-2mddh_a70458ed-18c5-49ef-8e30-83e39c3ec5e5/kube-rbac-proxy/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.276877 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-2mddh_a70458ed-18c5-49ef-8e30-83e39c3ec5e5/manager/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.580726 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-hbh5b_8c3d7854-7f93-46f3-aa4c-1c26dc987cbe/kube-rbac-proxy/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.659910 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-hbh5b_8c3d7854-7f93-46f3-aa4c-1c26dc987cbe/manager/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.758542 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-m7vrt_47db8f43-eb79-4338-88e1-1b464c8de306/kube-rbac-proxy/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.799861 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-m7vrt_47db8f43-eb79-4338-88e1-1b464c8de306/manager/0.log" Oct 07 15:50:23 crc kubenswrapper[4672]: I1007 15:50:23.871532 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-8f4wl_2186b39e-fac1-49ed-a0d3-d925a4a7c2e6/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.049579 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-598db_8b98122f-1fe5-456b-9e60-e0ac676afbfc/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.145839 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-8f4wl_2186b39e-fac1-49ed-a0d3-d925a4a7c2e6/manager/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.157144 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-598db_8b98122f-1fe5-456b-9e60-e0ac676afbfc/manager/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.383051 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-72pgl_255dad32-3ed4-49eb-8e4d-6cc40d83acc7/manager/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.389925 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-72pgl_255dad32-3ed4-49eb-8e4d-6cc40d83acc7/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.512183 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-44jhh_764c99ab-d28b-4a93-b2e6-5abdef46cde8/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.566202 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-44jhh_764c99ab-d28b-4a93-b2e6-5abdef46cde8/manager/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.668714 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq_00724d85-8a20-4114-9c19-10171b42d9d1/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.734060 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq_00724d85-8a20-4114-9c19-10171b42d9d1/manager/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.852520 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-l2zj6_24fa96bf-c94d-4e2c-974a-d00f03de100d/kube-rbac-proxy/0.log" Oct 07 15:50:24 crc kubenswrapper[4672]: I1007 15:50:24.902081 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-l2zj6_24fa96bf-c94d-4e2c-974a-d00f03de100d/manager/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.010324 4672 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-9rcgh_58058dbe-db1a-41b4-8643-21f790efaac3/kube-rbac-proxy/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.167078 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-9rcgh_58058dbe-db1a-41b4-8643-21f790efaac3/manager/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.170637 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-w89qd_8366fa08-0b1b-49f3-8ac1-7df869356e24/kube-rbac-proxy/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.207806 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-w89qd_8366fa08-0b1b-49f3-8ac1-7df869356e24/manager/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.356775 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46_51779ddc-1795-45cb-8ba8-8ac78b2c43c8/manager/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.375659 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46_51779ddc-1795-45cb-8ba8-8ac78b2c43c8/kube-rbac-proxy/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.517661 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-86dbb8967f-5bbbt_20bd297b-c47a-4b56-9581-4b4699b7d1d4/kube-rbac-proxy/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.602193 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7b5c677b9b-6dzk9_c2468e9c-5d8a-487f-8870-3b89f8c0e905/kube-rbac-proxy/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.873416 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7b5c677b9b-6dzk9_c2468e9c-5d8a-487f-8870-3b89f8c0e905/operator/0.log" Oct 07 15:50:25 crc kubenswrapper[4672]: I1007 15:50:25.907984 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6zf4v_34e36355-636c-455e-a493-0ff6fe705d28/registry-server/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.064222 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-g8n44_c5d2b5d0-7471-41e8-a5f9-7930a07fb483/kube-rbac-proxy/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.145055 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-g8n44_c5d2b5d0-7471-41e8-a5f9-7930a07fb483/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.164844 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-xzhc4_5422dad7-a0a4-4116-bee5-8e5580d50530/kube-rbac-proxy/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.345415 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-xzhc4_5422dad7-a0a4-4116-bee5-8e5580d50530/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: 
I1007 15:50:26.388273 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-j8bct_2815da8e-e8ee-45b7-a971-b36721ba4322/operator/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.615283 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9zgt5_eae78c95-e2b0-4fdb-8b01-bc446045704f/kube-rbac-proxy/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.623518 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-sfdxq_9c7d5ea5-33a0-4006-b116-8cba83443c79/kube-rbac-proxy/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.665859 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9zgt5_eae78c95-e2b0-4fdb-8b01-bc446045704f/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.856867 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-86dbb8967f-5bbbt_20bd297b-c47a-4b56-9581-4b4699b7d1d4/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.866585 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-sfdxq_9c7d5ea5-33a0-4006-b116-8cba83443c79/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.919193 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-2z4fk_a0710ae8-d5bc-4f95-a4ec-76128a3916bb/manager/0.log" Oct 07 15:50:26 crc kubenswrapper[4672]: I1007 15:50:26.951875 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-2z4fk_a0710ae8-d5bc-4f95-a4ec-76128a3916bb/kube-rbac-proxy/0.log" Oct 07 15:50:27 crc kubenswrapper[4672]: I1007 15:50:27.074384 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-27xdb_58c50b3b-c174-42d4-bcc1-d76b0a93cd58/kube-rbac-proxy/0.log" Oct 07 15:50:27 crc kubenswrapper[4672]: I1007 15:50:27.082938 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-27xdb_58c50b3b-c174-42d4-bcc1-d76b0a93cd58/manager/0.log" Oct 07 15:50:30 crc kubenswrapper[4672]: I1007 15:50:30.892107 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:50:30 crc kubenswrapper[4672]: E1007 15:50:30.893345 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:41 crc kubenswrapper[4672]: I1007 15:50:41.893762 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:50:41 crc kubenswrapper[4672]: E1007 15:50:41.894595 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:43 crc kubenswrapper[4672]: I1007 15:50:43.097530 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dh89s_d26357e0-a31a-460e-94f0-3414790054e6/control-plane-machine-set-operator/0.log" Oct 07 15:50:43 crc kubenswrapper[4672]: I1007 15:50:43.275928 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w45mb_93d4decb-aa9d-40aa-8e02-c6557c64aacb/machine-api-operator/0.log" Oct 07 15:50:43 crc kubenswrapper[4672]: I1007 15:50:43.278473 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w45mb_93d4decb-aa9d-40aa-8e02-c6557c64aacb/kube-rbac-proxy/0.log" Oct 07 15:50:52 crc kubenswrapper[4672]: I1007 15:50:52.892230 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:50:52 crc kubenswrapper[4672]: E1007 15:50:52.892932 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:50:54 crc kubenswrapper[4672]: I1007 15:50:54.802936 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-vbw2d_9d1fa281-ffab-4623-a2cd-a5197c100d6c/cert-manager-controller/0.log" Oct 07 15:50:54 crc kubenswrapper[4672]: I1007 15:50:54.960385 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-4dzmr_ce160d50-48d0-433f-924a-7f6e08afbb0b/cert-manager-cainjector/0.log" Oct 07 15:50:54 crc kubenswrapper[4672]: I1007 15:50:54.994849 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-xccp8_303de91b-56fc-4579-b771-882f6ec5a53d/cert-manager-webhook/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.305918 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-qpp9q_c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e/nmstate-console-plugin/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.501646 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/kube-rbac-proxy/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.503424 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-lbdvn_8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd/nmstate-handler/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.560217 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/nmstate-metrics/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.667193 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-p47n6_128e3d01-038d-4b02-91f6-b50124ff721a/nmstate-operator/0.log" Oct 07 15:51:06 crc kubenswrapper[4672]: I1007 15:51:06.759212 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v685k_38637aa1-8ff7-4b40-b3b6-eed0f91514f6/nmstate-webhook/0.log" Oct 07 15:51:07 crc kubenswrapper[4672]: I1007 15:51:07.892646 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:51:07 crc kubenswrapper[4672]: E1007 15:51:07.893948 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:51:18 crc kubenswrapper[4672]: I1007 15:51:18.893507 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:51:18 crc kubenswrapper[4672]: E1007 15:51:18.895254 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:51:24 crc kubenswrapper[4672]: I1007 15:51:24.703456 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-2qk76_cfac07f1-5471-4d32-9064-01b6f600c640/kube-rbac-proxy/0.log" Oct 07 15:51:24 crc kubenswrapper[4672]: I1007 15:51:24.822471 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-2qk76_cfac07f1-5471-4d32-9064-01b6f600c640/controller/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.014057 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.014443 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-zb5t6_6b9adeec-cd4d-4b83-8dd6-124c90eaa801/frr-k8s-webhook-server/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.203487 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.214659 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.218193 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.245158 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.487529 4672 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.488289 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.498196 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.531886 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.659720 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.662612 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.689223 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.786540 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/controller/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.860796 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/frr-metrics/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.926821 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/kube-rbac-proxy/0.log" Oct 07 15:51:25 crc kubenswrapper[4672]: I1007 15:51:25.990101 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/kube-rbac-proxy-frr/0.log" Oct 07 15:51:26 crc kubenswrapper[4672]: I1007 15:51:26.077586 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/reloader/0.log" Oct 07 15:51:26 crc kubenswrapper[4672]: I1007 15:51:26.293036 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79645d9fd4-bzw8f_257d8fce-653e-49bf-ba47-f5d0e156298d/manager/0.log" Oct 07 15:51:26 crc kubenswrapper[4672]: I1007 15:51:26.502304 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-9f889fdd4-rjh7w_82da041e-ab89-4015-baa7-491b55cc00ba/webhook-server/0.log" Oct 07 15:51:26 crc kubenswrapper[4672]: I1007 15:51:26.590374 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dnlzl_11d7aebd-bbc3-49e3-b5ac-53377112f97f/kube-rbac-proxy/0.log" Oct 07 15:51:27 crc kubenswrapper[4672]: I1007 15:51:27.214038 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dnlzl_11d7aebd-bbc3-49e3-b5ac-53377112f97f/speaker/0.log" Oct 07 15:51:27 crc kubenswrapper[4672]: I1007 15:51:27.292833 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/frr/0.log" Oct 07 15:51:33 crc kubenswrapper[4672]: I1007 15:51:33.897458 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:51:33 crc kubenswrapper[4672]: E1007 15:51:33.898251 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:51:40 crc kubenswrapper[4672]: I1007 15:51:40.770805 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:51:40 crc kubenswrapper[4672]: I1007 15:51:40.965853 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:51:40 crc kubenswrapper[4672]: I1007 15:51:40.972636 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.017306 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.208432 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.243807 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/extract/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.271654 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.400427 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.575359 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.625537 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.648305 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 
07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.824048 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 07 15:51:41 crc kubenswrapper[4672]: I1007 15:51:41.839010 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:51:42 crc kubenswrapper[4672]: I1007 15:51:42.636489 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:51:42 crc kubenswrapper[4672]: I1007 15:51:42.666378 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/registry-server/0.log" Oct 07 15:51:42 crc kubenswrapper[4672]: I1007 15:51:42.942829 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.046310 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.076394 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.242582 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.271160 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.546226 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.650472 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/registry-server/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.737957 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.786765 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:51:43 crc kubenswrapper[4672]: I1007 15:51:43.805423 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.508181 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.553850 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/extract/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.581651 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.641262 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ljjfp_663cdf72-a5c3-4331-90ce-a3b2dfc41c1d/marketplace-operator/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.751933 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.945688 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.968508 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:51:44 crc kubenswrapper[4672]: I1007 15:51:44.971928 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.163396 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.194884 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.253190 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.361109 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/registry-server/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.415573 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.457351 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.486380 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: 
I1007 15:51:45.652489 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.669540 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:51:45 crc kubenswrapper[4672]: I1007 15:51:45.890345 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/registry-server/0.log" Oct 07 15:51:47 crc kubenswrapper[4672]: I1007 15:51:47.891700 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:51:47 crc kubenswrapper[4672]: E1007 15:51:47.892225 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:52:00 crc kubenswrapper[4672]: I1007 15:52:00.896806 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:52:00 crc kubenswrapper[4672]: E1007 15:52:00.897616 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:52:13 crc kubenswrapper[4672]: I1007 15:52:13.899257 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:52:13 crc kubenswrapper[4672]: E1007 15:52:13.900473 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:52:26 crc kubenswrapper[4672]: I1007 15:52:26.892815 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:52:26 crc kubenswrapper[4672]: E1007 15:52:26.893953 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:52:39 crc kubenswrapper[4672]: I1007 15:52:39.892511 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:52:39 crc 
kubenswrapper[4672]: E1007 15:52:39.893347 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:52:51 crc kubenswrapper[4672]: I1007 15:52:51.892138 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:52:51 crc kubenswrapper[4672]: E1007 15:52:51.892943 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:53:03 crc kubenswrapper[4672]: I1007 15:53:03.902799 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:53:03 crc kubenswrapper[4672]: E1007 15:53:03.903596 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:53:17 crc kubenswrapper[4672]: I1007 15:53:17.893247 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:53:17 crc kubenswrapper[4672]: E1007 15:53:17.894111 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:53:31 crc kubenswrapper[4672]: I1007 15:53:31.900972 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:53:31 crc kubenswrapper[4672]: E1007 15:53:31.902328 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:53:45 crc kubenswrapper[4672]: I1007 15:53:45.893502 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:53:45 crc kubenswrapper[4672]: E1007 15:53:45.894738 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 15:53:50 crc kubenswrapper[4672]: I1007 15:53:50.306595 4672 generic.go:334] "Generic (PLEG): container finished" podID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerID="8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec" exitCode=0 Oct 07 15:53:50 crc kubenswrapper[4672]: I1007 15:53:50.306702 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-krr2z/must-gather-lmjv9" event={"ID":"ba91ba6b-8fe5-4323-869c-9e2d428d7481","Type":"ContainerDied","Data":"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec"} Oct 07 15:53:50 crc kubenswrapper[4672]: I1007 15:53:50.307597 4672 scope.go:117] "RemoveContainer" containerID="8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec" Oct 07 15:53:50 crc kubenswrapper[4672]: I1007 15:53:50.519299 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-krr2z_must-gather-lmjv9_ba91ba6b-8fe5-4323-869c-9e2d428d7481/gather/0.log" Oct 07 15:53:58 crc kubenswrapper[4672]: I1007 15:53:58.254425 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-krr2z/must-gather-lmjv9"] Oct 07 15:53:58 crc kubenswrapper[4672]: I1007 15:53:58.255230 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-krr2z/must-gather-lmjv9" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="copy" containerID="cri-o://3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06" gracePeriod=2 Oct 07 15:53:58 crc kubenswrapper[4672]: I1007 15:53:58.279102 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-krr2z/must-gather-lmjv9"] Oct 07 15:53:58 crc kubenswrapper[4672]: I1007 15:53:58.892201 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.238045 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-krr2z_must-gather-lmjv9_ba91ba6b-8fe5-4323-869c-9e2d428d7481/copy/0.log" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.239070 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.362558 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlr67\" (UniqueName: \"kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67\") pod \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.362655 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output\") pod \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\" (UID: \"ba91ba6b-8fe5-4323-869c-9e2d428d7481\") " Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.368918 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67" (OuterVolumeSpecName: "kube-api-access-nlr67") pod "ba91ba6b-8fe5-4323-869c-9e2d428d7481" (UID: "ba91ba6b-8fe5-4323-869c-9e2d428d7481"). InnerVolumeSpecName "kube-api-access-nlr67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.421373 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374"} Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.424460 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-krr2z_must-gather-lmjv9_ba91ba6b-8fe5-4323-869c-9e2d428d7481/copy/0.log" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.424840 4672 generic.go:334] "Generic (PLEG): container finished" podID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerID="3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06" exitCode=143 Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.424888 4672 scope.go:117] "RemoveContainer" containerID="3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.424946 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-krr2z/must-gather-lmjv9" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.443924 4672 scope.go:117] "RemoveContainer" containerID="8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.470857 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlr67\" (UniqueName: \"kubernetes.io/projected/ba91ba6b-8fe5-4323-869c-9e2d428d7481-kube-api-access-nlr67\") on node \"crc\" DevicePath \"\"" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.508576 4672 scope.go:117] "RemoveContainer" containerID="3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06" Oct 07 15:53:59 crc kubenswrapper[4672]: E1007 15:53:59.509962 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06\": container with ID starting with 3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06 not found: ID does not exist" containerID="3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.510045 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06"} err="failed to get container status \"3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06\": rpc error: code = NotFound desc = could not find container \"3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06\": container with ID starting with 3dfb2e2575862b3572696bca8397458592f0d4ab2327c1d5161198cac3e4ec06 not found: ID does not exist" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.510330 4672 scope.go:117] "RemoveContainer" containerID="8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec" Oct 07 15:53:59 crc kubenswrapper[4672]: E1007 15:53:59.511151 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec\": container with ID starting with 8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec not found: ID does not exist" containerID="8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.511185 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec"} err="failed to get container status \"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec\": rpc error: code = NotFound desc = could not find container \"8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec\": container with ID starting with 8919f6afd39d5becbf44f82b6ada2734b7ea4cc0c854625c6a47ad27f4d637ec not found: ID does not exist" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.561884 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ba91ba6b-8fe5-4323-869c-9e2d428d7481" (UID: "ba91ba6b-8fe5-4323-869c-9e2d428d7481"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.573419 4672 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba91ba6b-8fe5-4323-869c-9e2d428d7481-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 07 15:53:59 crc kubenswrapper[4672]: I1007 15:53:59.903496 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" path="/var/lib/kubelet/pods/ba91ba6b-8fe5-4323-869c-9e2d428d7481/volumes" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.331351 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-r64p8/must-gather-vvfkw"] Oct 07 15:54:24 crc kubenswrapper[4672]: E1007 15:54:24.332438 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870d9f99-5388-4ff1-ae59-050ebe8801de" containerName="container-00" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332457 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="870d9f99-5388-4ff1-ae59-050ebe8801de" containerName="container-00" Oct 07 15:54:24 crc kubenswrapper[4672]: E1007 15:54:24.332483 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="gather" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332492 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="gather" Oct 07 15:54:24 crc kubenswrapper[4672]: E1007 15:54:24.332513 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="copy" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332522 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="copy" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332724 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="copy" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332751 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba91ba6b-8fe5-4323-869c-9e2d428d7481" containerName="gather" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.332772 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="870d9f99-5388-4ff1-ae59-050ebe8801de" containerName="container-00" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.334006 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.336415 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-r64p8"/"openshift-service-ca.crt" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.336606 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-r64p8"/"default-dockercfg-7pbr2" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.336963 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-r64p8"/"kube-root-ca.crt" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.341035 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-r64p8/must-gather-vvfkw"] Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.437301 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.437355 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbbvt\" (UniqueName: \"kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.538738 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.538790 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbbvt\" (UniqueName: \"kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.539392 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.561897 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbbvt\" (UniqueName: \"kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt\") pod \"must-gather-vvfkw\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:24 crc kubenswrapper[4672]: I1007 15:54:24.653222 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 15:54:25 crc kubenswrapper[4672]: I1007 15:54:25.143785 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-r64p8/must-gather-vvfkw"] Oct 07 15:54:25 crc kubenswrapper[4672]: I1007 15:54:25.650979 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/must-gather-vvfkw" event={"ID":"1fc62e64-152a-4067-af09-82c3bdb1b25c","Type":"ContainerStarted","Data":"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f"} Oct 07 15:54:25 crc kubenswrapper[4672]: I1007 15:54:25.651067 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/must-gather-vvfkw" event={"ID":"1fc62e64-152a-4067-af09-82c3bdb1b25c","Type":"ContainerStarted","Data":"31091feeb8697fc5712ef584795f7ba5da67f28c75f1f879ec0faaba426c8059"} Oct 07 15:54:26 crc kubenswrapper[4672]: I1007 15:54:26.669301 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/must-gather-vvfkw" event={"ID":"1fc62e64-152a-4067-af09-82c3bdb1b25c","Type":"ContainerStarted","Data":"0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8"} Oct 07 15:54:26 crc kubenswrapper[4672]: I1007 15:54:26.688901 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-r64p8/must-gather-vvfkw" podStartSLOduration=2.688873417 podStartE2EDuration="2.688873417s" podCreationTimestamp="2025-10-07 15:54:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:54:26.686402835 +0000 UTC m=+3943.661581426" watchObservedRunningTime="2025-10-07 15:54:26.688873417 +0000 UTC m=+3943.664051998" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.252428 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-r64p8/crc-debug-d6gg8"] Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.254754 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.351273 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r8tx\" (UniqueName: \"kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.351604 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.453814 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r8tx\" (UniqueName: \"kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.454007 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.454137 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.477761 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r8tx\" (UniqueName: \"kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx\") pod \"crc-debug-d6gg8\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.574646 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:54:29 crc kubenswrapper[4672]: W1007 15:54:29.624091 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac05dad9_2503_447b_aeac_3a3dc3876fdf.slice/crio-4acbdcfe130cf0d69acf9c0227a623177aded9ea8598153083ba49f23b697fff WatchSource:0}: Error finding container 4acbdcfe130cf0d69acf9c0227a623177aded9ea8598153083ba49f23b697fff: Status 404 returned error can't find the container with id 4acbdcfe130cf0d69acf9c0227a623177aded9ea8598153083ba49f23b697fff Oct 07 15:54:29 crc kubenswrapper[4672]: I1007 15:54:29.702497 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" event={"ID":"ac05dad9-2503-447b-aeac-3a3dc3876fdf","Type":"ContainerStarted","Data":"4acbdcfe130cf0d69acf9c0227a623177aded9ea8598153083ba49f23b697fff"} Oct 07 15:54:30 crc kubenswrapper[4672]: I1007 15:54:30.716938 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" event={"ID":"ac05dad9-2503-447b-aeac-3a3dc3876fdf","Type":"ContainerStarted","Data":"c54badaf78d8b3ffc955932e358d78b00d2243603831d2ee73926ac3f872c124"} Oct 07 15:54:30 crc kubenswrapper[4672]: I1007 15:54:30.741880 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" podStartSLOduration=1.741848041 podStartE2EDuration="1.741848041s" podCreationTimestamp="2025-10-07 15:54:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:54:30.730529013 +0000 UTC m=+3947.705707584" watchObservedRunningTime="2025-10-07 15:54:30.741848041 +0000 UTC m=+3947.717026632" Oct 07 15:54:55 crc kubenswrapper[4672]: I1007 15:54:55.922390 4672 scope.go:117] "RemoveContainer" containerID="46fc3e134555cd9adc6f0286b81317129f3145a142503cb3ec4a7b9338b3c21e" Oct 07 15:54:55 crc kubenswrapper[4672]: I1007 15:54:55.952588 4672 scope.go:117] "RemoveContainer" containerID="94ed1e1d470d42194d3caa386d5b74b1c70d72719b6a63d217a1dd2cd992df57" Oct 07 15:54:56 crc kubenswrapper[4672]: I1007 15:54:56.024198 4672 scope.go:117] "RemoveContainer" containerID="c3587cfc56574c000a002bd8932f47fe9a80ba208971f5f99e30430d6448add8" Oct 07 15:55:37 crc kubenswrapper[4672]: I1007 15:55:37.705101 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6d9d7bb546-5l2xz_a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4/barbican-api/0.log" Oct 07 15:55:37 crc kubenswrapper[4672]: I1007 15:55:37.752268 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6d9d7bb546-5l2xz_a9c5aa28-0ac6-4815-aecb-a8bfe04f2bb4/barbican-api-log/0.log" Oct 07 15:55:38 crc kubenswrapper[4672]: I1007 15:55:38.519125 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f4cdcdbb-4hkrg_dd7a9076-c512-4e9c-b626-482122ee920b/barbican-keystone-listener/0.log" Oct 07 15:55:38 crc kubenswrapper[4672]: I1007 15:55:38.537122 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f4cdcdbb-4hkrg_dd7a9076-c512-4e9c-b626-482122ee920b/barbican-keystone-listener-log/0.log" Oct 07 15:55:38 crc kubenswrapper[4672]: I1007 15:55:38.780854 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-fbf4c46f7-rxrcl_79138985-a31b-43fd-aec5-cfd8abf453d1/barbican-worker/0.log" 
Oct 07 15:55:38 crc kubenswrapper[4672]: I1007 15:55:38.807525 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-fbf4c46f7-rxrcl_79138985-a31b-43fd-aec5-cfd8abf453d1/barbican-worker-log/0.log" Oct 07 15:55:38 crc kubenswrapper[4672]: I1007 15:55:38.990009 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kfc5w_4a1d79b4-a176-48af-9c78-59c7ddd39b71/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.279504 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/ceilometer-notification-agent/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.294289 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/ceilometer-central-agent/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.325320 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/proxy-httpd/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.499982 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_35a2c542-5b6e-4828-a670-fc4345572dc0/sg-core/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.593518 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cac7576e-f243-46c2-90cb-e62d4c822d81/cinder-api/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.695679 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cac7576e-f243-46c2-90cb-e62d4c822d81/cinder-api-log/0.log" Oct 07 15:55:39 crc kubenswrapper[4672]: I1007 15:55:39.826280 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_133e6c33-5248-4463-9a3b-75431b468373/cinder-scheduler/0.log" Oct 07 15:55:40 crc kubenswrapper[4672]: I1007 15:55:40.035814 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_133e6c33-5248-4463-9a3b-75431b468373/probe/0.log" Oct 07 15:55:40 crc kubenswrapper[4672]: I1007 15:55:40.449040 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-lzg5z_a9f7823a-974b-4ef1-9414-f1aac7bd2179/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:40 crc kubenswrapper[4672]: I1007 15:55:40.645261 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-df6lz_fa45d7c4-d068-4df1-847b-589c7061b6e1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:40 crc kubenswrapper[4672]: I1007 15:55:40.715083 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-s7tbj_6d5fecb8-bc89-44d0-9413-4ab72d34390a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:40 crc kubenswrapper[4672]: I1007 15:55:40.939774 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/init/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.130341 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-5txmn_fa845c6c-c027-483f-b5e8-404778f6a1d4/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.147717 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/init/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.197690 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-599dd5cb59-whmjd_2645864b-ab57-47a0-8b17-478a93a55a7a/dnsmasq-dns/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.377258 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e66032dd-633c-4fa7-b39c-714c4c799aed/glance-httpd/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.404812 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e66032dd-633c-4fa7-b39c-714c4c799aed/glance-log/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.596020 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_180e3a93-5be7-42c2-832c-86e29fb5444d/glance-httpd/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.605206 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_180e3a93-5be7-42c2-832c-86e29fb5444d/glance-log/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.744359 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-694678548d-bbtxt_d3c1e726-5fce-4f95-952f-effb9a8993f3/horizon/0.log" Oct 07 15:55:41 crc kubenswrapper[4672]: I1007 15:55:41.995571 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9bsgj_be75457c-c14d-4827-850e-2619993cc1f6/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.161888 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-694678548d-bbtxt_d3c1e726-5fce-4f95-952f-effb9a8993f3/horizon-log/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.167912 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-x9l79_3f3dc5f0-5f9c-4ffb-8dfe-8d2f33d02a08/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.394752 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5858475b48-6sdfp_718ff39a-552f-48a0-91f4-c9fa07c17b7d/keystone-api/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.413592 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_0eb79c67-08e6-4bfc-9b12-333500d26d9c/kube-state-metrics/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.599768 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-wstfd_0c6d56dd-dfd4-446f-bc0a-f0c7c5a10841/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:42 crc kubenswrapper[4672]: I1007 15:55:42.977772 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6c7bf59845-qwdkz_fdb59d6a-51ea-4daa-904b-54a9a1af23f7/neutron-api/0.log" Oct 07 15:55:43 crc kubenswrapper[4672]: I1007 15:55:43.003162 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-6c7bf59845-qwdkz_fdb59d6a-51ea-4daa-904b-54a9a1af23f7/neutron-httpd/0.log" Oct 07 15:55:43 crc kubenswrapper[4672]: I1007 15:55:43.241544 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-hj8x7_ebc0c220-98a0-4285-89ec-689749e5f16b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:43 crc kubenswrapper[4672]: I1007 15:55:43.875630 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3/nova-api-log/0.log" Oct 07 15:55:44 crc kubenswrapper[4672]: I1007 15:55:44.097668 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_34190ed8-8a54-40ab-b9d4-85f48bc24ee9/nova-cell0-conductor-conductor/0.log" Oct 07 15:55:44 crc kubenswrapper[4672]: I1007 15:55:44.292473 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7cc6827c-96b0-4d2c-a45a-6b11d20e6ee3/nova-api-api/0.log" Oct 07 15:55:44 crc kubenswrapper[4672]: I1007 15:55:44.533543 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_38be140b-8203-4f2a-8215-6c76db50244e/nova-cell1-conductor-conductor/0.log" Oct 07 15:55:44 crc kubenswrapper[4672]: I1007 15:55:44.683469 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_696b0eb7-d73f-4a68-a991-d9af3b74fea2/nova-cell1-novncproxy-novncproxy/0.log" Oct 07 15:55:44 crc kubenswrapper[4672]: I1007 15:55:44.840676 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-2s7mn_be41884e-d5f3-42e7-bd95-3c04629e26e3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:45 crc kubenswrapper[4672]: I1007 15:55:45.049971 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_050ad2ed-cc5a-41bb-a2f9-437deee02938/nova-metadata-log/0.log" Oct 07 15:55:45 crc kubenswrapper[4672]: I1007 15:55:45.568988 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_21b9d12a-9d4c-49c3-8e94-80d538f4853a/nova-scheduler-scheduler/0.log" Oct 07 15:55:45 crc kubenswrapper[4672]: I1007 15:55:45.728966 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/mysql-bootstrap/0.log" Oct 07 15:55:45 crc kubenswrapper[4672]: I1007 15:55:45.983121 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/galera/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.005583 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6331c4be-e7ca-4786-9ac8-3aac826906e0/mysql-bootstrap/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.257892 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/mysql-bootstrap/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.547301 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/mysql-bootstrap/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.552134 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3168983-5af6-4e27-a5f0-23c80d627c0a/galera/0.log" Oct 07 15:55:46 crc 
kubenswrapper[4672]: I1007 15:55:46.785208 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_452e785c-16f5-478c-8c52-638692cd3abd/openstackclient/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.809221 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_050ad2ed-cc5a-41bb-a2f9-437deee02938/nova-metadata-metadata/0.log" Oct 07 15:55:46 crc kubenswrapper[4672]: I1007 15:55:46.997768 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-djrzm_76c480b3-ae62-4b55-b055-d0c1c0ff5777/openstack-network-exporter/0.log" Oct 07 15:55:47 crc kubenswrapper[4672]: I1007 15:55:47.270915 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server-init/0.log" Oct 07 15:55:47 crc kubenswrapper[4672]: I1007 15:55:47.511958 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server/0.log" Oct 07 15:55:47 crc kubenswrapper[4672]: I1007 15:55:47.584163 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovsdb-server-init/0.log" Oct 07 15:55:47 crc kubenswrapper[4672]: I1007 15:55:47.623069 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5p5sl_73afc1bf-c6d8-4d61-b050-db75d0cd219f/ovs-vswitchd/0.log" Oct 07 15:55:47 crc kubenswrapper[4672]: I1007 15:55:47.829512 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-tzsfl_0e4d1227-361f-4c04-b0ce-12295f021364/ovn-controller/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.106614 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-lngw9_04f4d7c8-1cca-4233-9cc5-dfa205f89c49/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.170935 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b2cc7fd-952a-4115-a4e4-408bfc75a54d/openstack-network-exporter/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.315852 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b2cc7fd-952a-4115-a4e4-408bfc75a54d/ovn-northd/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.502717 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6a4f76b5-fd91-462b-9e85-9c66d83ab353/openstack-network-exporter/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.591639 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6a4f76b5-fd91-462b-9e85-9c66d83ab353/ovsdbserver-nb/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.755730 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_16ba9419-35b8-45df-bf64-86d144d67284/openstack-network-exporter/0.log" Oct 07 15:55:48 crc kubenswrapper[4672]: I1007 15:55:48.854592 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_16ba9419-35b8-45df-bf64-86d144d67284/ovsdbserver-sb/0.log" Oct 07 15:55:49 crc kubenswrapper[4672]: I1007 15:55:49.068638 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-78768fbc98-wqtlt_8ee6c49e-c514-4798-ab81-0bc31c59048d/placement-api/0.log" Oct 07 
15:55:49 crc kubenswrapper[4672]: I1007 15:55:49.237376 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-78768fbc98-wqtlt_8ee6c49e-c514-4798-ab81-0bc31c59048d/placement-log/0.log" Oct 07 15:55:49 crc kubenswrapper[4672]: I1007 15:55:49.718759 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/setup-container/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.002653 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/setup-container/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.012901 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_60a532ee-7772-464b-8f0b-854377647a20/rabbitmq/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.243846 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/setup-container/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.408516 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/setup-container/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.425973 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6915154f-a1f8-4f93-8a8b-00020a761f95/rabbitmq/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.625600 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-j8l9f_c1ded63b-448b-4e94-9c53-e87268f775ac/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.873811 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lrt9s_1861ff34-6944-4cea-950f-8efc95e05f1a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:50 crc kubenswrapper[4672]: I1007 15:55:50.978409 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-hjn5w_fa79d159-5cc6-405e-8725-d3f49e9f75f8/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:51 crc kubenswrapper[4672]: I1007 15:55:51.136912 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-mr4zp_414dfaec-a340-4080-b3e7-da4966078c60/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:51 crc kubenswrapper[4672]: I1007 15:55:51.751307 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-mslxh_b69369f3-e53e-4aea-ab35-dcc1e9f3d56f/ssh-known-hosts-edpm-deployment/0.log" Oct 07 15:55:51 crc kubenswrapper[4672]: I1007 15:55:51.990998 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d96d8478f-sx526_e7406b25-bb39-409b-bde4-75cc32bf4ae2/proxy-server/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.022110 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d96d8478f-sx526_e7406b25-bb39-409b-bde4-75cc32bf4ae2/proxy-httpd/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.165643 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-r4b6l_a57e275b-ff63-4284-aae2-7fbc858c0128/swift-ring-rebalance/0.log" Oct 07 
15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.352184 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-auditor/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.393776 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-reaper/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.574865 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-server/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.576103 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/account-replicator/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.693118 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-auditor/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.869303 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-server/0.log" Oct 07 15:55:52 crc kubenswrapper[4672]: I1007 15:55:52.883667 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-replicator/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.000092 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/container-updater/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.320748 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-expirer/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.414725 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-auditor/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.534694 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-replicator/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.655635 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-server/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.657297 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/object-updater/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.794257 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/rsync/0.log" Oct 07 15:55:53 crc kubenswrapper[4672]: I1007 15:55:53.922161 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_99812f34-1d2c-4f0f-bf07-9569fde6d437/swift-recon-cron/0.log" Oct 07 15:55:54 crc kubenswrapper[4672]: I1007 15:55:54.066420 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-jktl2_a7d08d2c-7a70-47f3-afa0-f93d34efd7dc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:55:54 crc kubenswrapper[4672]: I1007 15:55:54.309755 4672 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5104b964-0b2d-4b1f-920b-a0b3016ed885/tempest-tests-tempest-tests-runner/0.log" Oct 07 15:55:54 crc kubenswrapper[4672]: I1007 15:55:54.446192 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_91f7fb17-f404-4b7b-9e67-4b908e341901/test-operator-logs-container/0.log" Oct 07 15:55:54 crc kubenswrapper[4672]: I1007 15:55:54.640279 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-kzhkv_6b18bc85-3a86-4989-a2b2-dd34d127023a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 15:56:01 crc kubenswrapper[4672]: I1007 15:56:01.698186 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_09d93eef-fed4-4023-80a4-8927b2631580/memcached/0.log" Oct 07 15:56:22 crc kubenswrapper[4672]: I1007 15:56:22.950505 4672 generic.go:334] "Generic (PLEG): container finished" podID="ac05dad9-2503-447b-aeac-3a3dc3876fdf" containerID="c54badaf78d8b3ffc955932e358d78b00d2243603831d2ee73926ac3f872c124" exitCode=0 Oct 07 15:56:22 crc kubenswrapper[4672]: I1007 15:56:22.950599 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" event={"ID":"ac05dad9-2503-447b-aeac-3a3dc3876fdf","Type":"ContainerDied","Data":"c54badaf78d8b3ffc955932e358d78b00d2243603831d2ee73926ac3f872c124"} Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.079718 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.111710 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-d6gg8"] Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.122473 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-d6gg8"] Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.263907 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host\") pod \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.264048 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host" (OuterVolumeSpecName: "host") pod "ac05dad9-2503-447b-aeac-3a3dc3876fdf" (UID: "ac05dad9-2503-447b-aeac-3a3dc3876fdf"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.264104 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r8tx\" (UniqueName: \"kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx\") pod \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\" (UID: \"ac05dad9-2503-447b-aeac-3a3dc3876fdf\") " Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.264646 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ac05dad9-2503-447b-aeac-3a3dc3876fdf-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.269271 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx" (OuterVolumeSpecName: "kube-api-access-7r8tx") pod "ac05dad9-2503-447b-aeac-3a3dc3876fdf" (UID: "ac05dad9-2503-447b-aeac-3a3dc3876fdf"). InnerVolumeSpecName "kube-api-access-7r8tx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.366346 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r8tx\" (UniqueName: \"kubernetes.io/projected/ac05dad9-2503-447b-aeac-3a3dc3876fdf-kube-api-access-7r8tx\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.972677 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4acbdcfe130cf0d69acf9c0227a623177aded9ea8598153083ba49f23b697fff" Oct 07 15:56:24 crc kubenswrapper[4672]: I1007 15:56:24.972769 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-d6gg8" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.261326 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-r64p8/crc-debug-vp4rr"] Oct 07 15:56:25 crc kubenswrapper[4672]: E1007 15:56:25.261717 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac05dad9-2503-447b-aeac-3a3dc3876fdf" containerName="container-00" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.261729 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac05dad9-2503-447b-aeac-3a3dc3876fdf" containerName="container-00" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.261899 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac05dad9-2503-447b-aeac-3a3dc3876fdf" containerName="container-00" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.262593 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.284057 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.284254 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lt9l\" (UniqueName: \"kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.385534 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.385641 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.385826 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lt9l\" (UniqueName: \"kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.403871 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lt9l\" (UniqueName: \"kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l\") pod \"crc-debug-vp4rr\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.578374 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.909656 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac05dad9-2503-447b-aeac-3a3dc3876fdf" path="/var/lib/kubelet/pods/ac05dad9-2503-447b-aeac-3a3dc3876fdf/volumes" Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.988147 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" event={"ID":"7a5165e8-9b16-49d4-b52f-dad01b31bdd1","Type":"ContainerStarted","Data":"98f692c41c424032c9b92b3fbe0ca5b545a95608022ce2c59f428874a6f7c7b1"} Oct 07 15:56:25 crc kubenswrapper[4672]: I1007 15:56:25.988206 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" event={"ID":"7a5165e8-9b16-49d4-b52f-dad01b31bdd1","Type":"ContainerStarted","Data":"4b259cd5be18651bc64da727b6574397839dd43c6f1af74f902433fb946bdb19"} Oct 07 15:56:26 crc kubenswrapper[4672]: I1007 15:56:26.004160 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" podStartSLOduration=1.004136787 podStartE2EDuration="1.004136787s" podCreationTimestamp="2025-10-07 15:56:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 15:56:26.001935083 +0000 UTC m=+4062.977113684" watchObservedRunningTime="2025-10-07 15:56:26.004136787 +0000 UTC m=+4062.979315368" Oct 07 15:56:26 crc kubenswrapper[4672]: I1007 15:56:26.650035 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:56:26 crc kubenswrapper[4672]: I1007 15:56:26.650101 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:56:26 crc kubenswrapper[4672]: I1007 15:56:26.998103 4672 generic.go:334] "Generic (PLEG): container finished" podID="7a5165e8-9b16-49d4-b52f-dad01b31bdd1" containerID="98f692c41c424032c9b92b3fbe0ca5b545a95608022ce2c59f428874a6f7c7b1" exitCode=0 Oct 07 15:56:26 crc kubenswrapper[4672]: I1007 15:56:26.998157 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" event={"ID":"7a5165e8-9b16-49d4-b52f-dad01b31bdd1","Type":"ContainerDied","Data":"98f692c41c424032c9b92b3fbe0ca5b545a95608022ce2c59f428874a6f7c7b1"} Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.120763 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.142578 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host\") pod \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.142653 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lt9l\" (UniqueName: \"kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l\") pod \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\" (UID: \"7a5165e8-9b16-49d4-b52f-dad01b31bdd1\") " Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.142707 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host" (OuterVolumeSpecName: "host") pod "7a5165e8-9b16-49d4-b52f-dad01b31bdd1" (UID: "7a5165e8-9b16-49d4-b52f-dad01b31bdd1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.143146 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.148870 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l" (OuterVolumeSpecName: "kube-api-access-7lt9l") pod "7a5165e8-9b16-49d4-b52f-dad01b31bdd1" (UID: "7a5165e8-9b16-49d4-b52f-dad01b31bdd1"). InnerVolumeSpecName "kube-api-access-7lt9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:56:28 crc kubenswrapper[4672]: I1007 15:56:28.244358 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lt9l\" (UniqueName: \"kubernetes.io/projected/7a5165e8-9b16-49d4-b52f-dad01b31bdd1-kube-api-access-7lt9l\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:29 crc kubenswrapper[4672]: I1007 15:56:29.015330 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" event={"ID":"7a5165e8-9b16-49d4-b52f-dad01b31bdd1","Type":"ContainerDied","Data":"4b259cd5be18651bc64da727b6574397839dd43c6f1af74f902433fb946bdb19"} Oct 07 15:56:29 crc kubenswrapper[4672]: I1007 15:56:29.015378 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b259cd5be18651bc64da727b6574397839dd43c6f1af74f902433fb946bdb19" Oct 07 15:56:29 crc kubenswrapper[4672]: I1007 15:56:29.015385 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-vp4rr" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.599459 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:31 crc kubenswrapper[4672]: E1007 15:56:31.600677 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5165e8-9b16-49d4-b52f-dad01b31bdd1" containerName="container-00" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.600697 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a5165e8-9b16-49d4-b52f-dad01b31bdd1" containerName="container-00" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.602680 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5165e8-9b16-49d4-b52f-dad01b31bdd1" containerName="container-00" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.610580 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.653363 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.741167 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nljzb\" (UniqueName: \"kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.741246 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.741262 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.842762 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nljzb\" (UniqueName: \"kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.842877 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.842903 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content\") pod \"redhat-marketplace-fptmm\" (UID: 
\"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.843351 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.843554 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.868395 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nljzb\" (UniqueName: \"kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb\") pod \"redhat-marketplace-fptmm\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:31 crc kubenswrapper[4672]: I1007 15:56:31.949628 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:32 crc kubenswrapper[4672]: I1007 15:56:32.460461 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:32 crc kubenswrapper[4672]: W1007 15:56:32.471253 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fb5ad6d_1ae9_402d_80f1_6fba6f3db9b2.slice/crio-a527f83439e28821f21805cae3708106c82e1d755572abe0b1dc27c0e6381b22 WatchSource:0}: Error finding container a527f83439e28821f21805cae3708106c82e1d755572abe0b1dc27c0e6381b22: Status 404 returned error can't find the container with id a527f83439e28821f21805cae3708106c82e1d755572abe0b1dc27c0e6381b22 Oct 07 15:56:32 crc kubenswrapper[4672]: I1007 15:56:32.791506 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-vp4rr"] Oct 07 15:56:32 crc kubenswrapper[4672]: I1007 15:56:32.798640 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-vp4rr"] Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.056382 4672 generic.go:334] "Generic (PLEG): container finished" podID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerID="202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267" exitCode=0 Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.056435 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerDied","Data":"202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267"} Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.056486 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerStarted","Data":"a527f83439e28821f21805cae3708106c82e1d755572abe0b1dc27c0e6381b22"} Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.059169 4672 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.904877 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a5165e8-9b16-49d4-b52f-dad01b31bdd1" path="/var/lib/kubelet/pods/7a5165e8-9b16-49d4-b52f-dad01b31bdd1/volumes" Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.974713 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-r64p8/crc-debug-gj7nk"] Oct 07 15:56:33 crc kubenswrapper[4672]: I1007 15:56:33.976288 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.065991 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerStarted","Data":"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c"} Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.082155 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt2jq\" (UniqueName: \"kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.082204 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.183703 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt2jq\" (UniqueName: \"kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.183742 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.183932 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.200697 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt2jq\" (UniqueName: \"kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq\") pod \"crc-debug-gj7nk\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: I1007 15:56:34.298297 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:34 crc kubenswrapper[4672]: W1007 15:56:34.323064 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96fc5307_4072_4944_9492_54bd9ee76015.slice/crio-50d2692924c914b0428e1f4fc0f79e0bd2e06fbce54ac6a2766471d7b4c92bf0 WatchSource:0}: Error finding container 50d2692924c914b0428e1f4fc0f79e0bd2e06fbce54ac6a2766471d7b4c92bf0: Status 404 returned error can't find the container with id 50d2692924c914b0428e1f4fc0f79e0bd2e06fbce54ac6a2766471d7b4c92bf0 Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.078596 4672 generic.go:334] "Generic (PLEG): container finished" podID="96fc5307-4072-4944-9492-54bd9ee76015" containerID="6a672e7ba8de6afb7d036695df00d1122b0bcafdd40f1f7da553c043ca8aebda" exitCode=0 Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.078963 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" event={"ID":"96fc5307-4072-4944-9492-54bd9ee76015","Type":"ContainerDied","Data":"6a672e7ba8de6afb7d036695df00d1122b0bcafdd40f1f7da553c043ca8aebda"} Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.079184 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" event={"ID":"96fc5307-4072-4944-9492-54bd9ee76015","Type":"ContainerStarted","Data":"50d2692924c914b0428e1f4fc0f79e0bd2e06fbce54ac6a2766471d7b4c92bf0"} Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.082770 4672 generic.go:334] "Generic (PLEG): container finished" podID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerID="b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c" exitCode=0 Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.082821 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerDied","Data":"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c"} Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.137332 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-gj7nk"] Oct 07 15:56:35 crc kubenswrapper[4672]: I1007 15:56:35.147885 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-r64p8/crc-debug-gj7nk"] Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.115313 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerStarted","Data":"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36"} Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.144972 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fptmm" podStartSLOduration=2.651234386 podStartE2EDuration="5.14494786s" podCreationTimestamp="2025-10-07 15:56:31 +0000 UTC" firstStartedPulling="2025-10-07 15:56:33.058673824 +0000 UTC m=+4070.033852405" lastFinishedPulling="2025-10-07 15:56:35.552387298 +0000 UTC m=+4072.527565879" observedRunningTime="2025-10-07 15:56:36.139966265 +0000 UTC m=+4073.115144836" watchObservedRunningTime="2025-10-07 15:56:36.14494786 +0000 UTC m=+4073.120126451" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.209988 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.218106 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt2jq\" (UniqueName: \"kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq\") pod \"96fc5307-4072-4944-9492-54bd9ee76015\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.218241 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host\") pod \"96fc5307-4072-4944-9492-54bd9ee76015\" (UID: \"96fc5307-4072-4944-9492-54bd9ee76015\") " Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.218360 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host" (OuterVolumeSpecName: "host") pod "96fc5307-4072-4944-9492-54bd9ee76015" (UID: "96fc5307-4072-4944-9492-54bd9ee76015"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.218649 4672 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/96fc5307-4072-4944-9492-54bd9ee76015-host\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.228386 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq" (OuterVolumeSpecName: "kube-api-access-qt2jq") pod "96fc5307-4072-4944-9492-54bd9ee76015" (UID: "96fc5307-4072-4944-9492-54bd9ee76015"). InnerVolumeSpecName "kube-api-access-qt2jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.320800 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt2jq\" (UniqueName: \"kubernetes.io/projected/96fc5307-4072-4944-9492-54bd9ee76015-kube-api-access-qt2jq\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.773800 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.938949 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.939687 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:56:36 crc kubenswrapper[4672]: I1007 15:56:36.986264 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.124917 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r64p8/crc-debug-gj7nk" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.124917 4672 scope.go:117] "RemoveContainer" containerID="6a672e7ba8de6afb7d036695df00d1122b0bcafdd40f1f7da553c043ca8aebda" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.126572 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/util/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.159530 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/pull/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.175622 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_55b8d742aa38dde5a4fe871e5b350568548d3bb967cb14940132c0ca1cdlxqg_ee34e0a3-7308-4d6e-bfaa-d999a7bde6cf/extract/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.308503 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-hvp85_b5f8d795-a31d-4992-99fc-590848eae6fd/kube-rbac-proxy/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.356349 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-jnwxm_d427841d-eba8-45b1-aa18-de4a5d1fecaa/kube-rbac-proxy/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.408732 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-hvp85_b5f8d795-a31d-4992-99fc-590848eae6fd/manager/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.569141 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-l4zjl_fba2eb6a-4cb0-4fc9-9625-e7a57382e412/kube-rbac-proxy/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.588278 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-jnwxm_d427841d-eba8-45b1-aa18-de4a5d1fecaa/manager/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.617703 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-l4zjl_fba2eb6a-4cb0-4fc9-9625-e7a57382e412/manager/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.814768 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-2mddh_a70458ed-18c5-49ef-8e30-83e39c3ec5e5/kube-rbac-proxy/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.815597 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-2mddh_a70458ed-18c5-49ef-8e30-83e39c3ec5e5/manager/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.901955 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96fc5307-4072-4944-9492-54bd9ee76015" path="/var/lib/kubelet/pods/96fc5307-4072-4944-9492-54bd9ee76015/volumes" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.970693 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-hbh5b_8c3d7854-7f93-46f3-aa4c-1c26dc987cbe/kube-rbac-proxy/0.log" Oct 07 15:56:37 crc kubenswrapper[4672]: I1007 15:56:37.998714 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-hbh5b_8c3d7854-7f93-46f3-aa4c-1c26dc987cbe/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.029562 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-m7vrt_47db8f43-eb79-4338-88e1-1b464c8de306/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.167547 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-m7vrt_47db8f43-eb79-4338-88e1-1b464c8de306/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.220908 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-8f4wl_2186b39e-fac1-49ed-a0d3-d925a4a7c2e6/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.413991 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-8f4wl_2186b39e-fac1-49ed-a0d3-d925a4a7c2e6/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.433789 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-598db_8b98122f-1fe5-456b-9e60-e0ac676afbfc/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.450166 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-598db_8b98122f-1fe5-456b-9e60-e0ac676afbfc/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.632085 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-72pgl_255dad32-3ed4-49eb-8e4d-6cc40d83acc7/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.698941 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-72pgl_255dad32-3ed4-49eb-8e4d-6cc40d83acc7/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.779212 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-44jhh_764c99ab-d28b-4a93-b2e6-5abdef46cde8/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.818639 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-44jhh_764c99ab-d28b-4a93-b2e6-5abdef46cde8/manager/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.874967 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq_00724d85-8a20-4114-9c19-10171b42d9d1/kube-rbac-proxy/0.log" Oct 07 15:56:38 crc kubenswrapper[4672]: I1007 15:56:38.965910 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-rpxtq_00724d85-8a20-4114-9c19-10171b42d9d1/manager/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.086726 4672 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-l2zj6_24fa96bf-c94d-4e2c-974a-d00f03de100d/kube-rbac-proxy/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.101422 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-l2zj6_24fa96bf-c94d-4e2c-974a-d00f03de100d/manager/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.237697 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-9rcgh_58058dbe-db1a-41b4-8643-21f790efaac3/kube-rbac-proxy/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.377962 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-w89qd_8366fa08-0b1b-49f3-8ac1-7df869356e24/kube-rbac-proxy/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.398191 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-9rcgh_58058dbe-db1a-41b4-8643-21f790efaac3/manager/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.469498 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-w89qd_8366fa08-0b1b-49f3-8ac1-7df869356e24/manager/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.893480 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46_51779ddc-1795-45cb-8ba8-8ac78b2c43c8/manager/0.log" Oct 07 15:56:39 crc kubenswrapper[4672]: I1007 15:56:39.919654 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cgqq46_51779ddc-1795-45cb-8ba8-8ac78b2c43c8/kube-rbac-proxy/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.127996 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-86dbb8967f-5bbbt_20bd297b-c47a-4b56-9581-4b4699b7d1d4/kube-rbac-proxy/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.167202 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7b5c677b9b-6dzk9_c2468e9c-5d8a-487f-8870-3b89f8c0e905/kube-rbac-proxy/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.241668 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7b5c677b9b-6dzk9_c2468e9c-5d8a-487f-8870-3b89f8c0e905/operator/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.486207 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6zf4v_34e36355-636c-455e-a493-0ff6fe705d28/registry-server/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.505264 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-g8n44_c5d2b5d0-7471-41e8-a5f9-7930a07fb483/kube-rbac-proxy/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.724352 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-xzhc4_5422dad7-a0a4-4116-bee5-8e5580d50530/kube-rbac-proxy/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: 
I1007 15:56:40.733521 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-xzhc4_5422dad7-a0a4-4116-bee5-8e5580d50530/manager/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.735780 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-g8n44_c5d2b5d0-7471-41e8-a5f9-7930a07fb483/manager/0.log" Oct 07 15:56:40 crc kubenswrapper[4672]: I1007 15:56:40.996459 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-j8bct_2815da8e-e8ee-45b7-a971-b36721ba4322/operator/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.003356 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9zgt5_eae78c95-e2b0-4fdb-8b01-bc446045704f/kube-rbac-proxy/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.230627 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-sfdxq_9c7d5ea5-33a0-4006-b116-8cba83443c79/kube-rbac-proxy/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.255129 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9zgt5_eae78c95-e2b0-4fdb-8b01-bc446045704f/manager/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.316411 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-86dbb8967f-5bbbt_20bd297b-c47a-4b56-9581-4b4699b7d1d4/manager/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.371964 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-sfdxq_9c7d5ea5-33a0-4006-b116-8cba83443c79/manager/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.485335 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-2z4fk_a0710ae8-d5bc-4f95-a4ec-76128a3916bb/kube-rbac-proxy/0.log" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.950494 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.950833 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:41 crc kubenswrapper[4672]: I1007 15:56:41.988584 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-2z4fk_a0710ae8-d5bc-4f95-a4ec-76128a3916bb/manager/0.log" Oct 07 15:56:42 crc kubenswrapper[4672]: I1007 15:56:42.004854 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:42 crc kubenswrapper[4672]: I1007 15:56:42.105820 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-27xdb_58c50b3b-c174-42d4-bcc1-d76b0a93cd58/manager/0.log" Oct 07 15:56:42 crc kubenswrapper[4672]: I1007 15:56:42.150423 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-27xdb_58c50b3b-c174-42d4-bcc1-d76b0a93cd58/kube-rbac-proxy/0.log" Oct 07 15:56:42 crc kubenswrapper[4672]: I1007 15:56:42.222822 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:42 crc kubenswrapper[4672]: I1007 15:56:42.277816 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.188603 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fptmm" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="registry-server" containerID="cri-o://e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36" gracePeriod=2 Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.742518 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.782519 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities\") pod \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.784067 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities" (OuterVolumeSpecName: "utilities") pod "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" (UID: "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.885565 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nljzb\" (UniqueName: \"kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb\") pod \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.886056 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content\") pod \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\" (UID: \"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2\") " Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.886672 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.890817 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb" (OuterVolumeSpecName: "kube-api-access-nljzb") pod "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" (UID: "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2"). InnerVolumeSpecName "kube-api-access-nljzb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.906835 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" (UID: "2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.987416 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:44 crc kubenswrapper[4672]: I1007 15:56:44.987789 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nljzb\" (UniqueName: \"kubernetes.io/projected/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2-kube-api-access-nljzb\") on node \"crc\" DevicePath \"\"" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.201535 4672 generic.go:334] "Generic (PLEG): container finished" podID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerID="e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36" exitCode=0 Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.201587 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerDied","Data":"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36"} Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.201614 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fptmm" event={"ID":"2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2","Type":"ContainerDied","Data":"a527f83439e28821f21805cae3708106c82e1d755572abe0b1dc27c0e6381b22"} Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.201650 4672 scope.go:117] "RemoveContainer" containerID="e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.201841 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fptmm" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.260095 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.279638 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fptmm"] Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.306874 4672 scope.go:117] "RemoveContainer" containerID="b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.404583 4672 scope.go:117] "RemoveContainer" containerID="202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.441114 4672 scope.go:117] "RemoveContainer" containerID="e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36" Oct 07 15:56:45 crc kubenswrapper[4672]: E1007 15:56:45.444355 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36\": container with ID starting with e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36 not found: ID does not exist" containerID="e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.444431 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36"} err="failed to get container status \"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36\": rpc error: code = NotFound desc = could not find container \"e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36\": container with ID starting with e78705f2b1bf48052aaeb3548750fe2df80c8b54d90ba4dddcae82c6bc32cc36 not found: ID does not exist" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.444478 4672 scope.go:117] "RemoveContainer" containerID="b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c" Oct 07 15:56:45 crc kubenswrapper[4672]: E1007 15:56:45.445071 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c\": container with ID starting with b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c not found: ID does not exist" containerID="b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.445112 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c"} err="failed to get container status \"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c\": rpc error: code = NotFound desc = could not find container \"b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c\": container with ID starting with b5dd1b95c6f25b537340193d752d2c56a5976e7fed160f4d475df17a8ba9393c not found: ID does not exist" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.445320 4672 scope.go:117] "RemoveContainer" containerID="202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267" Oct 07 15:56:45 crc kubenswrapper[4672]: E1007 15:56:45.445608 4672 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267\": container with ID starting with 202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267 not found: ID does not exist" containerID="202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.445642 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267"} err="failed to get container status \"202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267\": rpc error: code = NotFound desc = could not find container \"202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267\": container with ID starting with 202f7a44f5e070d9decec9c8f608d27b8a9eaef8460c496a2cbfe7805c62f267 not found: ID does not exist" Oct 07 15:56:45 crc kubenswrapper[4672]: I1007 15:56:45.906573 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" path="/var/lib/kubelet/pods/2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2/volumes" Oct 07 15:56:56 crc kubenswrapper[4672]: I1007 15:56:56.208334 4672 scope.go:117] "RemoveContainer" containerID="7fdaa3cbf9fc8b80eb2a567a2a7642e3e3a079a8c8759883c3e3fd895ce2e7b6" Oct 07 15:56:56 crc kubenswrapper[4672]: I1007 15:56:56.650398 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:56:56 crc kubenswrapper[4672]: I1007 15:56:56.650463 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:56:58 crc kubenswrapper[4672]: I1007 15:56:58.544733 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dh89s_d26357e0-a31a-460e-94f0-3414790054e6/control-plane-machine-set-operator/0.log" Oct 07 15:56:58 crc kubenswrapper[4672]: I1007 15:56:58.886707 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w45mb_93d4decb-aa9d-40aa-8e02-c6557c64aacb/kube-rbac-proxy/0.log" Oct 07 15:56:58 crc kubenswrapper[4672]: I1007 15:56:58.974240 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w45mb_93d4decb-aa9d-40aa-8e02-c6557c64aacb/machine-api-operator/0.log" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.682214 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"] Oct 07 15:57:10 crc kubenswrapper[4672]: E1007 15:57:10.683229 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="registry-server" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683243 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="registry-server" Oct 07 15:57:10 crc kubenswrapper[4672]: E1007 15:57:10.683268 4672 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="extract-content" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683274 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="extract-content" Oct 07 15:57:10 crc kubenswrapper[4672]: E1007 15:57:10.683300 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="extract-utilities" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683306 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="extract-utilities" Oct 07 15:57:10 crc kubenswrapper[4672]: E1007 15:57:10.683316 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96fc5307-4072-4944-9492-54bd9ee76015" containerName="container-00" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683321 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="96fc5307-4072-4944-9492-54bd9ee76015" containerName="container-00" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683531 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="96fc5307-4072-4944-9492-54bd9ee76015" containerName="container-00" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.683546 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fb5ad6d-1ae9-402d-80f1-6fba6f3db9b2" containerName="registry-server" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.684893 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.701613 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"] Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.796623 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnjw4\" (UniqueName: \"kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.796715 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.796763 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.898476 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnjw4\" (UniqueName: \"kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc 
kubenswrapper[4672]: I1007 15:57:10.898678 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.898730 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.899396 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:10 crc kubenswrapper[4672]: I1007 15:57:10.899490 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:11 crc kubenswrapper[4672]: I1007 15:57:11.528237 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnjw4\" (UniqueName: \"kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4\") pod \"certified-operators-fqkkm\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") " pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:11 crc kubenswrapper[4672]: I1007 15:57:11.624661 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.170803 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-vbw2d_9d1fa281-ffab-4623-a2cd-a5197c100d6c/cert-manager-controller/0.log" Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.189087 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"] Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.364914 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-4dzmr_ce160d50-48d0-433f-924a-7f6e08afbb0b/cert-manager-cainjector/0.log" Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.414586 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-xccp8_303de91b-56fc-4579-b771-882f6ec5a53d/cert-manager-webhook/0.log" Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.434075 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerDied","Data":"f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899"} Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.434002 4672 generic.go:334] "Generic (PLEG): container finished" podID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerID="f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899" exitCode=0 Oct 07 15:57:12 crc kubenswrapper[4672]: I1007 15:57:12.434188 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerStarted","Data":"ff4eb54303be810cd2a3c87c084430b65e0a8d168ca8cae9f63a4a1f59803794"} Oct 07 15:57:13 crc kubenswrapper[4672]: I1007 15:57:13.444317 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerStarted","Data":"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"} Oct 07 15:57:14 crc kubenswrapper[4672]: I1007 15:57:14.462659 4672 generic.go:334] "Generic (PLEG): container finished" podID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerID="5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786" exitCode=0 Oct 07 15:57:14 crc kubenswrapper[4672]: I1007 15:57:14.462709 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerDied","Data":"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"} Oct 07 15:57:15 crc kubenswrapper[4672]: I1007 15:57:15.474036 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerStarted","Data":"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"} Oct 07 15:57:15 crc kubenswrapper[4672]: I1007 15:57:15.498882 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fqkkm" podStartSLOduration=2.728477269 podStartE2EDuration="5.498859114s" podCreationTimestamp="2025-10-07 15:57:10 +0000 UTC" firstStartedPulling="2025-10-07 15:57:12.435527353 +0000 UTC m=+4109.410705934" lastFinishedPulling="2025-10-07 15:57:15.205909198 +0000 
UTC m=+4112.181087779" observedRunningTime="2025-10-07 15:57:15.491606313 +0000 UTC m=+4112.466784894" watchObservedRunningTime="2025-10-07 15:57:15.498859114 +0000 UTC m=+4112.474037695" Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.624811 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.625328 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.671374 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:22 crc kubenswrapper[4672]: I1007 15:57:22.591368 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fqkkm" Oct 07 15:57:22 crc kubenswrapper[4672]: I1007 15:57:22.637670 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"] Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.249624 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-qpp9q_c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e/nmstate-console-plugin/0.log" Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.454355 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-lbdvn_8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd/nmstate-handler/0.log" Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.467179 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/kube-rbac-proxy/0.log" Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.513816 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/nmstate-metrics/0.log" Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.644384 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-p47n6_128e3d01-038d-4b02-91f6-b50124ff721a/nmstate-operator/0.log" Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.750489 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v685k_38637aa1-8ff7-4b40-b3b6-eed0f91514f6/nmstate-webhook/0.log" Oct 07 15:57:24 crc kubenswrapper[4672]: I1007 15:57:24.563500 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fqkkm" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="registry-server" containerID="cri-o://cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be" gracePeriod=2 Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.525757 4672 util.go:48] "No ready sandbox for pod can be found. 
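[editor's note] The `pod_startup_latency_tracker` entry above encodes a small identity worth making explicit: `podStartE2EDuration` is `watchObservedRunningTime - podCreationTimestamp`, and `podStartSLOduration` is that figure minus the image-pull window (`lastFinishedPulling - firstStartedPulling`), which the startup SLO excludes. A quick arithmetic check against the values logged for certified-operators-fqkkm (timestamps copied from the entry above, truncated to microseconds since `strptime` `%f` takes at most six digits):

```python
# Verify podStartSLOduration = E2E duration minus the image-pull window,
# using the timestamps from the pod_startup_latency_tracker entry above.
from datetime import datetime, timezone

def ts(s: str) -> datetime:
    return datetime.strptime(s, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=timezone.utc)

created    = ts("2025-10-07 15:57:10.000000")  # podCreationTimestamp (whole second in the log)
first_pull = ts("2025-10-07 15:57:12.435527")  # firstStartedPulling
last_pull  = ts("2025-10-07 15:57:15.205909")  # lastFinishedPulling
running    = ts("2025-10-07 15:57:15.498859")  # watchObservedRunningTime

e2e = (running - created).total_seconds()             # ~5.498859 s, matches podStartE2EDuration
slo = e2e - (last_pull - first_pull).total_seconds()  # ~2.728477 s, matches podStartSLOduration
print(f"e2e={e2e:.6f}s slo={slo:.6f}s")
```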
Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.624811 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.625328 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:21 crc kubenswrapper[4672]: I1007 15:57:21.671374 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:22 crc kubenswrapper[4672]: I1007 15:57:22.591368 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:22 crc kubenswrapper[4672]: I1007 15:57:22.637670 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"]
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.249624 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-qpp9q_c0cf8eba-0e8a-4fbb-a31d-30d55e596f7e/nmstate-console-plugin/0.log"
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.454355 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-lbdvn_8672cc28-f5df-4e9d-8ff8-bcbff6c0b6fd/nmstate-handler/0.log"
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.467179 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/kube-rbac-proxy/0.log"
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.513816 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-2lmqd_011af584-1b04-4f6c-9cb1-48e1adac8d81/nmstate-metrics/0.log"
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.644384 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-p47n6_128e3d01-038d-4b02-91f6-b50124ff721a/nmstate-operator/0.log"
Oct 07 15:57:23 crc kubenswrapper[4672]: I1007 15:57:23.750489 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v685k_38637aa1-8ff7-4b40-b3b6-eed0f91514f6/nmstate-webhook/0.log"
Oct 07 15:57:24 crc kubenswrapper[4672]: I1007 15:57:24.563500 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fqkkm" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="registry-server" containerID="cri-o://cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be" gracePeriod=2
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.525757 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.595077 4672 generic.go:334] "Generic (PLEG): container finished" podID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerID="cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be" exitCode=0
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.595126 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerDied","Data":"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"}
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.595152 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqkkm" event={"ID":"c6b95c19-4009-4fd5-86ab-1de2c2c564fb","Type":"ContainerDied","Data":"ff4eb54303be810cd2a3c87c084430b65e0a8d168ca8cae9f63a4a1f59803794"}
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.595170 4672 scope.go:117] "RemoveContainer" containerID="cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.595243 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqkkm"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.613951 4672 scope.go:117] "RemoveContainer" containerID="5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.635050 4672 scope.go:117] "RemoveContainer" containerID="f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.677133 4672 scope.go:117] "RemoveContainer" containerID="cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"
Oct 07 15:57:25 crc kubenswrapper[4672]: E1007 15:57:25.677544 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be\": container with ID starting with cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be not found: ID does not exist" containerID="cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.677607 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be"} err="failed to get container status \"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be\": rpc error: code = NotFound desc = could not find container \"cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be\": container with ID starting with cf8e2e210f61d0da9b866384dbe0d478d119ef7faca91c4dc38c1acead5ac5be not found: ID does not exist"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.677639 4672 scope.go:117] "RemoveContainer" containerID="5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678109 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnjw4\" (UniqueName: \"kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4\") pod \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") "
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678179 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities\") pod \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") "
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678211 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content\") pod \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\" (UID: \"c6b95c19-4009-4fd5-86ab-1de2c2c564fb\") "
Oct 07 15:57:25 crc kubenswrapper[4672]: E1007 15:57:25.678252 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786\": container with ID starting with 5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786 not found: ID does not exist" containerID="5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678279 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786"} err="failed to get container status \"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786\": rpc error: code = NotFound desc = could not find container \"5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786\": container with ID starting with 5585b75557a867a936dea5f892ac56f27c88356b65f97e3ab2d91f3168de6786 not found: ID does not exist"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678298 4672 scope.go:117] "RemoveContainer" containerID="f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899"
Oct 07 15:57:25 crc kubenswrapper[4672]: E1007 15:57:25.678878 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899\": container with ID starting with f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899 not found: ID does not exist" containerID="f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.678909 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899"} err="failed to get container status \"f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899\": rpc error: code = NotFound desc = could not find container \"f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899\": container with ID starting with f97200bdc8a8923c3ca13af6ed84cc78622c2aec8fe256a89e487932df1b9899 not found: ID does not exist"
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.679376 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities" (OuterVolumeSpecName: "utilities") pod "c6b95c19-4009-4fd5-86ab-1de2c2c564fb" (UID: "c6b95c19-4009-4fd5-86ab-1de2c2c564fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.685191 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4" (OuterVolumeSpecName: "kube-api-access-qnjw4") pod "c6b95c19-4009-4fd5-86ab-1de2c2c564fb" (UID: "c6b95c19-4009-4fd5-86ab-1de2c2c564fb"). InnerVolumeSpecName "kube-api-access-qnjw4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.736124 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6b95c19-4009-4fd5-86ab-1de2c2c564fb" (UID: "c6b95c19-4009-4fd5-86ab-1de2c2c564fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.780911 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnjw4\" (UniqueName: \"kubernetes.io/projected/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-kube-api-access-qnjw4\") on node \"crc\" DevicePath \"\""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.780953 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.780964 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6b95c19-4009-4fd5-86ab-1de2c2c564fb-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.934904 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"]
Oct 07 15:57:25 crc kubenswrapper[4672]: I1007 15:57:25.944921 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fqkkm"]
Oct 07 15:57:26 crc kubenswrapper[4672]: I1007 15:57:26.650907 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 15:57:26 crc kubenswrapper[4672]: I1007 15:57:26.651242 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 15:57:26 crc kubenswrapper[4672]: I1007 15:57:26.651289 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj"
Oct 07 15:57:26 crc kubenswrapper[4672]: I1007 15:57:26.652048 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 15:57:26 crc kubenswrapper[4672]: I1007 15:57:26.652102 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374" gracePeriod=600
Oct 07 15:57:27 crc kubenswrapper[4672]: I1007 15:57:27.617878 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374" exitCode=0
Oct 07 15:57:27 crc kubenswrapper[4672]: I1007 15:57:27.617947 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374"}
Oct 07 15:57:27 crc kubenswrapper[4672]: I1007 15:57:27.618408 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerStarted","Data":"4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e"}
Oct 07 15:57:27 crc kubenswrapper[4672]: I1007 15:57:27.618432 4672 scope.go:117] "RemoveContainer" containerID="99c21a30905a11778e63b1095709de2193d5298af196cf93a6f91cf325a28b16"
Oct 07 15:57:27 crc kubenswrapper[4672]: I1007 15:57:27.901615 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" path="/var/lib/kubelet/pods/c6b95c19-4009-4fd5-86ab-1de2c2c564fb/volumes"
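[editor's note] The machine-config-daemon liveness failures above recur on a fixed cadence (15:56:56, 15:57:26, then again at 15:59:26 after a restart window), and once the failure threshold is met the kubelet logs "failed liveness probe, will be restarted" and kills the container with the pod's termination grace period (600 s here, versus 2 s for the short-lived catalog pods). A sketch, again assuming the hypothetical `kubelet.log` with one entry per line, that extracts the failure timestamps and reports the spacing:

```python
# Report the spacing between liveness-probe failures per pod from kubelet.log
# (hypothetical local copy of the excerpt above, one entry per line).
import re
from datetime import datetime

PROBE = re.compile(
    r'^(\w{3} \d{2} \d{2}:\d{2}:\d{2}) .*"Probe failed" probeType="Liveness" '
    r'pod="([^"]+)"'
)

failures: dict[str, list[datetime]] = {}
with open("kubelet.log") as fh:
    for line in fh:
        m = PROBE.match(line)
        if m:
            # The journal line carries no year; 2025 is taken from context.
            t = datetime.strptime("2025 " + m.group(1), "%Y %b %d %H:%M:%S")
            failures.setdefault(m.group(2), []).append(t)

for pod, times in failures.items():
    gaps = [int((b - a).total_seconds()) for a, b in zip(times, times[1:])]
    print(pod, gaps)  # e.g. [30, 120] for machine-config-daemon-mklmj in this excerpt
```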
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.280080 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.312972 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.336935 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.477557 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-frr-files/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.517199 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-reloader/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.518434 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/cp-metrics/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.532722 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/controller/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.707861 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/frr-metrics/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.720905 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/kube-rbac-proxy/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.753780 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/kube-rbac-proxy-frr/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.920657 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/reloader/0.log" Oct 07 15:57:38 crc kubenswrapper[4672]: I1007 15:57:38.982654 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79645d9fd4-bzw8f_257d8fce-653e-49bf-ba47-f5d0e156298d/manager/0.log" Oct 07 15:57:39 crc kubenswrapper[4672]: I1007 15:57:39.482157 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-9f889fdd4-rjh7w_82da041e-ab89-4015-baa7-491b55cc00ba/webhook-server/0.log" Oct 07 15:57:39 crc kubenswrapper[4672]: I1007 15:57:39.750435 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dnlzl_11d7aebd-bbc3-49e3-b5ac-53377112f97f/kube-rbac-proxy/0.log" Oct 07 15:57:40 crc kubenswrapper[4672]: I1007 15:57:40.118808 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrmb7_977f6996-2c6a-4749-a101-914929de6749/frr/0.log" Oct 07 15:57:40 crc kubenswrapper[4672]: I1007 15:57:40.209299 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-dnlzl_11d7aebd-bbc3-49e3-b5ac-53377112f97f/speaker/0.log" Oct 07 15:57:51 crc kubenswrapper[4672]: I1007 15:57:51.963319 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.087600 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.097331 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.104254 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.275781 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/util/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.300407 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/pull/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.304272 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2s9lm6_00782a44-6ec2-425f-90d2-15ebc242cf3b/extract/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.445331 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.617381 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.619574 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.628063 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.768751 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-utilities/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.783712 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/extract-content/0.log" Oct 07 15:57:52 crc kubenswrapper[4672]: I1007 15:57:52.971304 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.189940 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.236750 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.259460 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.404711 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lrzlc_25bc32fc-0334-400e-903d-0a107454324e/registry-server/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.460929 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-content/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.486432 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/extract-utilities/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.658716 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.771658 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2g5m_b530062a-623d-417b-9cea-906c854ee3ed/registry-server/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.925178 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.946799 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:57:53 crc kubenswrapper[4672]: I1007 15:57:53.953085 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.142907 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/pull/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.145671 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/util/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.219065 4672 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7t8nm_a8c3719c-2c5b-489c-a1ae-4879a537b65a/extract/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.322191 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ljjfp_663cdf72-a5c3-4331-90ce-a3b2dfc41c1d/marketplace-operator/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.373223 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.561329 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.574649 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.623226 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.772880 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-utilities/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.773965 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/extract-content/0.log" Oct 07 15:57:54 crc kubenswrapper[4672]: I1007 15:57:54.970180 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kwgqv_20da9666-7ff2-4127-afc2-a5e1b1ca402e/registry-server/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.012025 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.178094 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.190238 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.195613 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.364762 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-content/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.382938 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/extract-utilities/0.log" Oct 07 15:57:55 crc kubenswrapper[4672]: I1007 15:57:55.499380 4672 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-pj9xc_458481ee-d389-40b6-a1bd-547d99652d6d/registry-server/0.log" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.149867 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:13 crc kubenswrapper[4672]: E1007 15:59:13.151573 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="extract-content" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.151591 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="extract-content" Oct 07 15:59:13 crc kubenswrapper[4672]: E1007 15:59:13.151605 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="extract-utilities" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.151660 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="extract-utilities" Oct 07 15:59:13 crc kubenswrapper[4672]: E1007 15:59:13.151674 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="registry-server" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.151680 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="registry-server" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.151891 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6b95c19-4009-4fd5-86ab-1de2c2c564fb" containerName="registry-server" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.168733 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.171490 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.248668 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.248970 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.249224 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8jvp\" (UniqueName: \"kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.351162 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8jvp\" (UniqueName: \"kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.351267 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.351310 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.351859 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.352010 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.378107 4672 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s8jvp\" (UniqueName: \"kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp\") pod \"community-operators-qqkcb\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:13 crc kubenswrapper[4672]: I1007 15:59:13.489091 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:14 crc kubenswrapper[4672]: I1007 15:59:14.063450 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:14 crc kubenswrapper[4672]: I1007 15:59:14.686626 4672 generic.go:334] "Generic (PLEG): container finished" podID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerID="a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333" exitCode=0 Oct 07 15:59:14 crc kubenswrapper[4672]: I1007 15:59:14.686725 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerDied","Data":"a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333"} Oct 07 15:59:14 crc kubenswrapper[4672]: I1007 15:59:14.686983 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerStarted","Data":"0812ea07a5ad1e2406303c3fb3216586680764ad7c9fcab1069248a2beb86067"} Oct 07 15:59:16 crc kubenswrapper[4672]: I1007 15:59:16.708365 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerStarted","Data":"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1"} Oct 07 15:59:17 crc kubenswrapper[4672]: I1007 15:59:17.718683 4672 generic.go:334] "Generic (PLEG): container finished" podID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerID="5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1" exitCode=0 Oct 07 15:59:17 crc kubenswrapper[4672]: I1007 15:59:17.718744 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerDied","Data":"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1"} Oct 07 15:59:18 crc kubenswrapper[4672]: I1007 15:59:18.733927 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerStarted","Data":"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c"} Oct 07 15:59:18 crc kubenswrapper[4672]: I1007 15:59:18.810629 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qqkcb" podStartSLOduration=2.133283622 podStartE2EDuration="5.810609749s" podCreationTimestamp="2025-10-07 15:59:13 +0000 UTC" firstStartedPulling="2025-10-07 15:59:14.689436511 +0000 UTC m=+4231.664615082" lastFinishedPulling="2025-10-07 15:59:18.366762628 +0000 UTC m=+4235.341941209" observedRunningTime="2025-10-07 15:59:18.809567038 +0000 UTC m=+4235.784745619" watchObservedRunningTime="2025-10-07 15:59:18.810609749 +0000 UTC m=+4235.785788330" Oct 07 15:59:23 crc kubenswrapper[4672]: I1007 15:59:23.489936 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:23 crc kubenswrapper[4672]: I1007 15:59:23.490528 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:23 crc kubenswrapper[4672]: I1007 15:59:23.533767 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:23 crc kubenswrapper[4672]: I1007 15:59:23.826857 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:23 crc kubenswrapper[4672]: I1007 15:59:23.930335 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:25 crc kubenswrapper[4672]: I1007 15:59:25.794628 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qqkcb" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="registry-server" containerID="cri-o://2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c" gracePeriod=2 Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.287347 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.416066 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content\") pod \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.416177 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities\") pod \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.416317 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8jvp\" (UniqueName: \"kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp\") pod \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\" (UID: \"f12b3ca8-3c26-41b6-bd46-05ab55747de6\") " Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.417561 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities" (OuterVolumeSpecName: "utilities") pod "f12b3ca8-3c26-41b6-bd46-05ab55747de6" (UID: "f12b3ca8-3c26-41b6-bd46-05ab55747de6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.423843 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp" (OuterVolumeSpecName: "kube-api-access-s8jvp") pod "f12b3ca8-3c26-41b6-bd46-05ab55747de6" (UID: "f12b3ca8-3c26-41b6-bd46-05ab55747de6"). InnerVolumeSpecName "kube-api-access-s8jvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.476164 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f12b3ca8-3c26-41b6-bd46-05ab55747de6" (UID: "f12b3ca8-3c26-41b6-bd46-05ab55747de6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.518043 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.518092 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f12b3ca8-3c26-41b6-bd46-05ab55747de6-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.518102 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8jvp\" (UniqueName: \"kubernetes.io/projected/f12b3ca8-3c26-41b6-bd46-05ab55747de6-kube-api-access-s8jvp\") on node \"crc\" DevicePath \"\"" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.650684 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.650750 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.809656 4672 generic.go:334] "Generic (PLEG): container finished" podID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerID="2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c" exitCode=0 Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.809744 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qqkcb" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.809733 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerDied","Data":"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c"} Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.809822 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qqkcb" event={"ID":"f12b3ca8-3c26-41b6-bd46-05ab55747de6","Type":"ContainerDied","Data":"0812ea07a5ad1e2406303c3fb3216586680764ad7c9fcab1069248a2beb86067"} Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.809858 4672 scope.go:117] "RemoveContainer" containerID="2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.864824 4672 scope.go:117] "RemoveContainer" containerID="5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.868907 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.877394 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qqkcb"] Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.888458 4672 scope.go:117] "RemoveContainer" containerID="a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.972943 4672 scope.go:117] "RemoveContainer" containerID="2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c" Oct 07 15:59:26 crc kubenswrapper[4672]: E1007 15:59:26.973579 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c\": container with ID starting with 2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c not found: ID does not exist" containerID="2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.973614 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c"} err="failed to get container status \"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c\": rpc error: code = NotFound desc = could not find container \"2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c\": container with ID starting with 2f2b09afd5bbec437a89624e848d9822712b8eb9bef1bc9ff7ff448e9ab8c08c not found: ID does not exist" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.973639 4672 scope.go:117] "RemoveContainer" containerID="5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1" Oct 07 15:59:26 crc kubenswrapper[4672]: E1007 15:59:26.974141 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1\": container with ID starting with 5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1 not found: ID does not exist" containerID="5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.974161 4672 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1"} err="failed to get container status \"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1\": rpc error: code = NotFound desc = could not find container \"5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1\": container with ID starting with 5cb82877df3d659391c9f72d7cbac7f248babf52e2bfc958ff25bc4759aff3e1 not found: ID does not exist" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.974179 4672 scope.go:117] "RemoveContainer" containerID="a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333" Oct 07 15:59:26 crc kubenswrapper[4672]: E1007 15:59:26.974756 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333\": container with ID starting with a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333 not found: ID does not exist" containerID="a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333" Oct 07 15:59:26 crc kubenswrapper[4672]: I1007 15:59:26.974797 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333"} err="failed to get container status \"a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333\": rpc error: code = NotFound desc = could not find container \"a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333\": container with ID starting with a812d6f3b65afa809f35c718bff58176875f7a3e2251fa487290b96815cca333 not found: ID does not exist" Oct 07 15:59:27 crc kubenswrapper[4672]: I1007 15:59:27.906526 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" path="/var/lib/kubelet/pods/f12b3ca8-3c26-41b6-bd46-05ab55747de6/volumes" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.481549 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 15:59:54 crc kubenswrapper[4672]: E1007 15:59:54.482621 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="registry-server" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.482640 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="registry-server" Oct 07 15:59:54 crc kubenswrapper[4672]: E1007 15:59:54.482659 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="extract-utilities" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.482667 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="extract-utilities" Oct 07 15:59:54 crc kubenswrapper[4672]: E1007 15:59:54.482683 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="extract-content" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.482691 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" containerName="extract-content" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.482978 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="f12b3ca8-3c26-41b6-bd46-05ab55747de6" 
containerName="registry-server" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.484668 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.492603 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.575496 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.575570 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdggm\" (UniqueName: \"kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.575832 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.677907 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.678072 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.678105 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdggm\" (UniqueName: \"kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.678546 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.678585 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 
07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.698551 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdggm\" (UniqueName: \"kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm\") pod \"redhat-operators-ddpvw\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:54 crc kubenswrapper[4672]: I1007 15:59:54.805721 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 15:59:55 crc kubenswrapper[4672]: I1007 15:59:55.290364 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 15:59:55 crc kubenswrapper[4672]: E1007 15:59:55.707188 4672 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf3a6395_6238_495d_b32a_077361bca787.slice/crio-conmon-baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf3a6395_6238_495d_b32a_077361bca787.slice/crio-baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926.scope\": RecentStats: unable to find data in memory cache]" Oct 07 15:59:56 crc kubenswrapper[4672]: I1007 15:59:56.134671 4672 generic.go:334] "Generic (PLEG): container finished" podID="bf3a6395-6238-495d-b32a-077361bca787" containerID="baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926" exitCode=0 Oct 07 15:59:56 crc kubenswrapper[4672]: I1007 15:59:56.134875 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerDied","Data":"baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926"} Oct 07 15:59:56 crc kubenswrapper[4672]: I1007 15:59:56.135074 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerStarted","Data":"01111deb3efd742b53fb3784eb30732fdce4952429d3677e610534297a0614e1"} Oct 07 15:59:56 crc kubenswrapper[4672]: I1007 15:59:56.649897 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 15:59:56 crc kubenswrapper[4672]: I1007 15:59:56.649957 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 15:59:57 crc kubenswrapper[4672]: I1007 15:59:57.147059 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerStarted","Data":"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2"} Oct 07 15:59:59 crc kubenswrapper[4672]: I1007 15:59:59.171397 4672 generic.go:334] "Generic (PLEG): container finished" podID="1fc62e64-152a-4067-af09-82c3bdb1b25c" 
containerID="4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f" exitCode=0 Oct 07 15:59:59 crc kubenswrapper[4672]: I1007 15:59:59.171486 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r64p8/must-gather-vvfkw" event={"ID":"1fc62e64-152a-4067-af09-82c3bdb1b25c","Type":"ContainerDied","Data":"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f"} Oct 07 15:59:59 crc kubenswrapper[4672]: I1007 15:59:59.172343 4672 scope.go:117] "RemoveContainer" containerID="4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f" Oct 07 15:59:59 crc kubenswrapper[4672]: I1007 15:59:59.918184 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r64p8_must-gather-vvfkw_1fc62e64-152a-4067-af09-82c3bdb1b25c/gather/0.log" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.161232 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2"] Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.162931 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.167948 4672 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.168487 4672 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.187651 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2"] Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.198315 4672 generic.go:334] "Generic (PLEG): container finished" podID="bf3a6395-6238-495d-b32a-077361bca787" containerID="71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2" exitCode=0 Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.198376 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerDied","Data":"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2"} Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.304584 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r22g2\" (UniqueName: \"kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.304921 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.305062 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume\") 
pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.407254 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r22g2\" (UniqueName: \"kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.407358 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.407390 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.409194 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.416390 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.428605 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r22g2\" (UniqueName: \"kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2\") pod \"collect-profiles-29330880-m94s2\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.491765 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:00 crc kubenswrapper[4672]: I1007 16:00:00.960204 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2"] Oct 07 16:00:00 crc kubenswrapper[4672]: W1007 16:00:00.967996 4672 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod232935e0_0865_4b92_a22b_f8c6aacf18b8.slice/crio-86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18 WatchSource:0}: Error finding container 86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18: Status 404 returned error can't find the container with id 86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18 Oct 07 16:00:01 crc kubenswrapper[4672]: I1007 16:00:01.210834 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" event={"ID":"232935e0-0865-4b92-a22b-f8c6aacf18b8","Type":"ContainerStarted","Data":"c9ba7f6f16c6212eb3b467056cafe3f4b1358ed662224e04821044a9ef3d9e14"} Oct 07 16:00:01 crc kubenswrapper[4672]: I1007 16:00:01.211524 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" event={"ID":"232935e0-0865-4b92-a22b-f8c6aacf18b8","Type":"ContainerStarted","Data":"86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18"} Oct 07 16:00:01 crc kubenswrapper[4672]: I1007 16:00:01.216573 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerStarted","Data":"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f"} Oct 07 16:00:01 crc kubenswrapper[4672]: I1007 16:00:01.240284 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" podStartSLOduration=1.2402592430000001 podStartE2EDuration="1.240259243s" podCreationTimestamp="2025-10-07 16:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 16:00:01.231418787 +0000 UTC m=+4278.206597378" watchObservedRunningTime="2025-10-07 16:00:01.240259243 +0000 UTC m=+4278.215437824" Oct 07 16:00:01 crc kubenswrapper[4672]: I1007 16:00:01.260337 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ddpvw" podStartSLOduration=2.79331198 podStartE2EDuration="7.260311314s" podCreationTimestamp="2025-10-07 15:59:54 +0000 UTC" firstStartedPulling="2025-10-07 15:59:56.139592004 +0000 UTC m=+4273.114770585" lastFinishedPulling="2025-10-07 16:00:00.606591338 +0000 UTC m=+4277.581769919" observedRunningTime="2025-10-07 16:00:01.253009113 +0000 UTC m=+4278.228187704" watchObservedRunningTime="2025-10-07 16:00:01.260311314 +0000 UTC m=+4278.235489895" Oct 07 16:00:02 crc kubenswrapper[4672]: I1007 16:00:02.229345 4672 generic.go:334] "Generic (PLEG): container finished" podID="232935e0-0865-4b92-a22b-f8c6aacf18b8" containerID="c9ba7f6f16c6212eb3b467056cafe3f4b1358ed662224e04821044a9ef3d9e14" exitCode=0 Oct 07 16:00:02 crc kubenswrapper[4672]: I1007 16:00:02.229488 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" 
event={"ID":"232935e0-0865-4b92-a22b-f8c6aacf18b8","Type":"ContainerDied","Data":"c9ba7f6f16c6212eb3b467056cafe3f4b1358ed662224e04821044a9ef3d9e14"} Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.730594 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.790177 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume\") pod \"232935e0-0865-4b92-a22b-f8c6aacf18b8\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.790320 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume\") pod \"232935e0-0865-4b92-a22b-f8c6aacf18b8\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.790391 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r22g2\" (UniqueName: \"kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2\") pod \"232935e0-0865-4b92-a22b-f8c6aacf18b8\" (UID: \"232935e0-0865-4b92-a22b-f8c6aacf18b8\") " Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.792141 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume" (OuterVolumeSpecName: "config-volume") pod "232935e0-0865-4b92-a22b-f8c6aacf18b8" (UID: "232935e0-0865-4b92-a22b-f8c6aacf18b8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.798885 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "232935e0-0865-4b92-a22b-f8c6aacf18b8" (UID: "232935e0-0865-4b92-a22b-f8c6aacf18b8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.798937 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2" (OuterVolumeSpecName: "kube-api-access-r22g2") pod "232935e0-0865-4b92-a22b-f8c6aacf18b8" (UID: "232935e0-0865-4b92-a22b-f8c6aacf18b8"). InnerVolumeSpecName "kube-api-access-r22g2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.899968 4672 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/232935e0-0865-4b92-a22b-f8c6aacf18b8-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.900031 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r22g2\" (UniqueName: \"kubernetes.io/projected/232935e0-0865-4b92-a22b-f8c6aacf18b8-kube-api-access-r22g2\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:03 crc kubenswrapper[4672]: I1007 16:00:03.900051 4672 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/232935e0-0865-4b92-a22b-f8c6aacf18b8-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.267757 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" event={"ID":"232935e0-0865-4b92-a22b-f8c6aacf18b8","Type":"ContainerDied","Data":"86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18"} Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.268319 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86f583d80b184ccb4701de8761119e2d4fa947ce6897963925deb237ab1cee18" Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.267946 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330880-m94s2" Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.318389 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj"] Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.326557 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330835-wj2pj"] Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.805934 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:04 crc kubenswrapper[4672]: I1007 16:00:04.806005 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:05 crc kubenswrapper[4672]: I1007 16:00:05.907137 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f018986c-bf8a-43ac-b89b-dfa0d276b31f" path="/var/lib/kubelet/pods/f018986c-bf8a-43ac-b89b-dfa0d276b31f/volumes" Oct 07 16:00:06 crc kubenswrapper[4672]: I1007 16:00:06.161157 4672 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ddpvw" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="registry-server" probeResult="failure" output=< Oct 07 16:00:06 crc kubenswrapper[4672]: timeout: failed to connect service ":50051" within 1s Oct 07 16:00:06 crc kubenswrapper[4672]: > Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.365571 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-r64p8/must-gather-vvfkw"] Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.366416 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-r64p8/must-gather-vvfkw" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="copy" 
containerID="cri-o://0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8" gracePeriod=2 Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.380592 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-r64p8/must-gather-vvfkw"] Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.860203 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r64p8_must-gather-vvfkw_1fc62e64-152a-4067-af09-82c3bdb1b25c/copy/0.log" Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.861261 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.971723 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output\") pod \"1fc62e64-152a-4067-af09-82c3bdb1b25c\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.971831 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbbvt\" (UniqueName: \"kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt\") pod \"1fc62e64-152a-4067-af09-82c3bdb1b25c\" (UID: \"1fc62e64-152a-4067-af09-82c3bdb1b25c\") " Oct 07 16:00:11 crc kubenswrapper[4672]: I1007 16:00:11.978886 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt" (OuterVolumeSpecName: "kube-api-access-dbbvt") pod "1fc62e64-152a-4067-af09-82c3bdb1b25c" (UID: "1fc62e64-152a-4067-af09-82c3bdb1b25c"). InnerVolumeSpecName "kube-api-access-dbbvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.074560 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbbvt\" (UniqueName: \"kubernetes.io/projected/1fc62e64-152a-4067-af09-82c3bdb1b25c-kube-api-access-dbbvt\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.109730 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1fc62e64-152a-4067-af09-82c3bdb1b25c" (UID: "1fc62e64-152a-4067-af09-82c3bdb1b25c"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.176481 4672 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1fc62e64-152a-4067-af09-82c3bdb1b25c-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.360833 4672 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r64p8_must-gather-vvfkw_1fc62e64-152a-4067-af09-82c3bdb1b25c/copy/0.log" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.361384 4672 generic.go:334] "Generic (PLEG): container finished" podID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerID="0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8" exitCode=143 Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.361435 4672 scope.go:117] "RemoveContainer" containerID="0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.361433 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r64p8/must-gather-vvfkw" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.384343 4672 scope.go:117] "RemoveContainer" containerID="4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.456720 4672 scope.go:117] "RemoveContainer" containerID="0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8" Oct 07 16:00:12 crc kubenswrapper[4672]: E1007 16:00:12.457682 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8\": container with ID starting with 0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8 not found: ID does not exist" containerID="0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.457742 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8"} err="failed to get container status \"0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8\": rpc error: code = NotFound desc = could not find container \"0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8\": container with ID starting with 0153a3c863fbfc6b17d39fef6b2446e7bd4f863b9a195909aa257123cb9029a8 not found: ID does not exist" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.457780 4672 scope.go:117] "RemoveContainer" containerID="4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f" Oct 07 16:00:12 crc kubenswrapper[4672]: E1007 16:00:12.458380 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f\": container with ID starting with 4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f not found: ID does not exist" containerID="4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f" Oct 07 16:00:12 crc kubenswrapper[4672]: I1007 16:00:12.458418 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f"} err="failed to get container status 
\"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f\": rpc error: code = NotFound desc = could not find container \"4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f\": container with ID starting with 4d76035b0a126eaa93c0fa8748f6bf7cb17e77c71d57f94f5ec5e7d087e7e45f not found: ID does not exist" Oct 07 16:00:13 crc kubenswrapper[4672]: I1007 16:00:13.914226 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" path="/var/lib/kubelet/pods/1fc62e64-152a-4067-af09-82c3bdb1b25c/volumes" Oct 07 16:00:14 crc kubenswrapper[4672]: I1007 16:00:14.859504 4672 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:14 crc kubenswrapper[4672]: I1007 16:00:14.904580 4672 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:15 crc kubenswrapper[4672]: I1007 16:00:15.706694 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.398753 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ddpvw" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="registry-server" containerID="cri-o://e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f" gracePeriod=2 Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.888730 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.979649 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content\") pod \"bf3a6395-6238-495d-b32a-077361bca787\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.980010 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities\") pod \"bf3a6395-6238-495d-b32a-077361bca787\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.980126 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdggm\" (UniqueName: \"kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm\") pod \"bf3a6395-6238-495d-b32a-077361bca787\" (UID: \"bf3a6395-6238-495d-b32a-077361bca787\") " Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.981310 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities" (OuterVolumeSpecName: "utilities") pod "bf3a6395-6238-495d-b32a-077361bca787" (UID: "bf3a6395-6238-495d-b32a-077361bca787"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 16:00:16 crc kubenswrapper[4672]: I1007 16:00:16.987629 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm" (OuterVolumeSpecName: "kube-api-access-fdggm") pod "bf3a6395-6238-495d-b32a-077361bca787" (UID: "bf3a6395-6238-495d-b32a-077361bca787"). 
InnerVolumeSpecName "kube-api-access-fdggm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.077395 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf3a6395-6238-495d-b32a-077361bca787" (UID: "bf3a6395-6238-495d-b32a-077361bca787"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.083196 4672 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.083257 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdggm\" (UniqueName: \"kubernetes.io/projected/bf3a6395-6238-495d-b32a-077361bca787-kube-api-access-fdggm\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.083272 4672 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf3a6395-6238-495d-b32a-077361bca787-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.411720 4672 generic.go:334] "Generic (PLEG): container finished" podID="bf3a6395-6238-495d-b32a-077361bca787" containerID="e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f" exitCode=0 Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.411798 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerDied","Data":"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f"} Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.411845 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ddpvw" event={"ID":"bf3a6395-6238-495d-b32a-077361bca787","Type":"ContainerDied","Data":"01111deb3efd742b53fb3784eb30732fdce4952429d3677e610534297a0614e1"} Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.411859 4672 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ddpvw" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.411869 4672 scope.go:117] "RemoveContainer" containerID="e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.447346 4672 scope.go:117] "RemoveContainer" containerID="71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.485103 4672 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.493269 4672 scope.go:117] "RemoveContainer" containerID="baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.497630 4672 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ddpvw"] Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.574557 4672 scope.go:117] "RemoveContainer" containerID="e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f" Oct 07 16:00:17 crc kubenswrapper[4672]: E1007 16:00:17.575125 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f\": container with ID starting with e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f not found: ID does not exist" containerID="e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.575208 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f"} err="failed to get container status \"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f\": rpc error: code = NotFound desc = could not find container \"e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f\": container with ID starting with e3602c06d9a36eaa38f8ce1fe33db1c62ff09d736f9f0037a3b4e6cd5d3e149f not found: ID does not exist" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.575234 4672 scope.go:117] "RemoveContainer" containerID="71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2" Oct 07 16:00:17 crc kubenswrapper[4672]: E1007 16:00:17.575608 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2\": container with ID starting with 71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2 not found: ID does not exist" containerID="71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.575627 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2"} err="failed to get container status \"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2\": rpc error: code = NotFound desc = could not find container \"71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2\": container with ID starting with 71157ba4fe06d5b3a8f1e9d6896d3f107f6d46bf4a9f8d9901a0e4f83ce3abd2 not found: ID does not exist" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.575641 4672 scope.go:117] "RemoveContainer" 
containerID="baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926" Oct 07 16:00:17 crc kubenswrapper[4672]: E1007 16:00:17.575864 4672 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926\": container with ID starting with baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926 not found: ID does not exist" containerID="baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.575887 4672 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926"} err="failed to get container status \"baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926\": rpc error: code = NotFound desc = could not find container \"baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926\": container with ID starting with baa390bb88976372825c5e924acf28b08558a7b05f54735fec3c13f8d3ca8926 not found: ID does not exist" Oct 07 16:00:17 crc kubenswrapper[4672]: I1007 16:00:17.905543 4672 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf3a6395-6238-495d-b32a-077361bca787" path="/var/lib/kubelet/pods/bf3a6395-6238-495d-b32a-077361bca787/volumes" Oct 07 16:00:26 crc kubenswrapper[4672]: I1007 16:00:26.650714 4672 patch_prober.go:28] interesting pod/machine-config-daemon-mklmj container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 16:00:26 crc kubenswrapper[4672]: I1007 16:00:26.651417 4672 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 16:00:26 crc kubenswrapper[4672]: I1007 16:00:26.651463 4672 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" Oct 07 16:00:26 crc kubenswrapper[4672]: I1007 16:00:26.652176 4672 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e"} pod="openshift-machine-config-operator/machine-config-daemon-mklmj" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 16:00:26 crc kubenswrapper[4672]: I1007 16:00:26.652237 4672 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerName="machine-config-daemon" containerID="cri-o://4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" gracePeriod=600 Oct 07 16:00:26 crc kubenswrapper[4672]: E1007 16:00:26.781689 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 16:00:27 crc kubenswrapper[4672]: I1007 16:00:27.518889 4672 generic.go:334] "Generic (PLEG): container finished" podID="492d7244-71b2-4f06-bb99-2f4069a8198c" containerID="4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" exitCode=0 Oct 07 16:00:27 crc kubenswrapper[4672]: I1007 16:00:27.518946 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" event={"ID":"492d7244-71b2-4f06-bb99-2f4069a8198c","Type":"ContainerDied","Data":"4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e"} Oct 07 16:00:27 crc kubenswrapper[4672]: I1007 16:00:27.518987 4672 scope.go:117] "RemoveContainer" containerID="7b14dc72c012e9e9b834a4e904bd630ffe81f4604ea50e7fe43d42ffb33e1374" Oct 07 16:00:27 crc kubenswrapper[4672]: I1007 16:00:27.519712 4672 scope.go:117] "RemoveContainer" containerID="4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" Oct 07 16:00:27 crc kubenswrapper[4672]: E1007 16:00:27.519954 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 16:00:39 crc kubenswrapper[4672]: I1007 16:00:39.891913 4672 scope.go:117] "RemoveContainer" containerID="4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" Oct 07 16:00:39 crc kubenswrapper[4672]: E1007 16:00:39.892740 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 16:00:53 crc kubenswrapper[4672]: I1007 16:00:53.905500 4672 scope.go:117] "RemoveContainer" containerID="4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" Oct 07 16:00:53 crc kubenswrapper[4672]: E1007 16:00:53.906545 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 16:00:56 crc kubenswrapper[4672]: I1007 16:00:56.404452 4672 scope.go:117] "RemoveContainer" containerID="e58cb2445728215f52ba900dcdfa41019de6abfd24e0889aa31e3d0d7d2c879e" Oct 07 16:00:56 crc kubenswrapper[4672]: I1007 16:00:56.430124 4672 scope.go:117] "RemoveContainer" containerID="c54badaf78d8b3ffc955932e358d78b00d2243603831d2ee73926ac3f872c124" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.156132 4672 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29330881-mh5hb"] Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.158978 4672 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="registry-server" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.159167 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="registry-server" Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.159340 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232935e0-0865-4b92-a22b-f8c6aacf18b8" containerName="collect-profiles" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.159410 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="232935e0-0865-4b92-a22b-f8c6aacf18b8" containerName="collect-profiles" Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.159486 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="extract-content" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.159547 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="extract-content" Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.159631 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="copy" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.159705 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="copy" Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.159791 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="gather" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.159866 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="gather" Oct 07 16:01:00 crc kubenswrapper[4672]: E1007 16:01:00.159973 4672 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="extract-utilities" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.160091 4672 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="extract-utilities" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.160436 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="copy" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.160535 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf3a6395-6238-495d-b32a-077361bca787" containerName="registry-server" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.160615 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc62e64-152a-4067-af09-82c3bdb1b25c" containerName="gather" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.160690 4672 memory_manager.go:354] "RemoveStaleState removing state" podUID="232935e0-0865-4b92-a22b-f8c6aacf18b8" containerName="collect-profiles" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.161640 4672 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.166425 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29330881-mh5hb"] Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.244596 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.244701 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phq25\" (UniqueName: \"kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.244770 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.244845 4672 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.346297 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.346361 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.346449 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.346510 4672 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phq25\" (UniqueName: \"kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.353332 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.355304 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.361737 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.366655 4672 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phq25\" (UniqueName: \"kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25\") pod \"keystone-cron-29330881-mh5hb\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") " pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.491138 4672 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29330881-mh5hb" Oct 07 16:01:00 crc kubenswrapper[4672]: I1007 16:01:00.963151 4672 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29330881-mh5hb"] Oct 07 16:01:01 crc kubenswrapper[4672]: I1007 16:01:01.904278 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330881-mh5hb" event={"ID":"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2","Type":"ContainerStarted","Data":"dbc305e0fe901cb4ab892286b462ef7328a340c45475ee9d87d45c88ec5ccd24"} Oct 07 16:01:01 crc kubenswrapper[4672]: I1007 16:01:01.904922 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330881-mh5hb" event={"ID":"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2","Type":"ContainerStarted","Data":"99d465188c5e32aea8517dda852997ab33dd495381cb3099797b23e3b4f8d554"} Oct 07 16:01:01 crc kubenswrapper[4672]: I1007 16:01:01.930988 4672 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29330881-mh5hb" podStartSLOduration=1.930965622 podStartE2EDuration="1.930965622s" podCreationTimestamp="2025-10-07 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 16:01:01.923931188 +0000 UTC m=+4338.899109769" watchObservedRunningTime="2025-10-07 16:01:01.930965622 +0000 UTC m=+4338.906144203" Oct 07 16:01:03 crc kubenswrapper[4672]: I1007 16:01:03.916620 4672 generic.go:334] "Generic (PLEG): container finished" podID="247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2" containerID="dbc305e0fe901cb4ab892286b462ef7328a340c45475ee9d87d45c88ec5ccd24" exitCode=0 Oct 07 16:01:03 crc kubenswrapper[4672]: I1007 16:01:03.916793 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330881-mh5hb" event={"ID":"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2","Type":"ContainerDied","Data":"dbc305e0fe901cb4ab892286b462ef7328a340c45475ee9d87d45c88ec5ccd24"} Oct 07 16:01:05 crc kubenswrapper[4672]: 
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.282133 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29330881-mh5hb"
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.352558 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data\") pod \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") "
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.352857 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phq25\" (UniqueName: \"kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25\") pod \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") "
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.352977 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys\") pod \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") "
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.353138 4672 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle\") pod \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\" (UID: \"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2\") "
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.373466 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25" (OuterVolumeSpecName: "kube-api-access-phq25") pod "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2" (UID: "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2"). InnerVolumeSpecName "kube-api-access-phq25". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.379511 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2" (UID: "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.387586 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2" (UID: "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.419123 4672 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data" (OuterVolumeSpecName: "config-data") pod "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2" (UID: "247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.457246 4672 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.457291 4672 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.457310 4672 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.457325 4672 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phq25\" (UniqueName: \"kubernetes.io/projected/247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2-kube-api-access-phq25\") on node \"crc\" DevicePath \"\"" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.893407 4672 scope.go:117] "RemoveContainer" containerID="4981f5b86009ac284902fb49c091a5aa03fa546a08bf23a0149b575416254f1e" Oct 07 16:01:05 crc kubenswrapper[4672]: E1007 16:01:05.893922 4672 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mklmj_openshift-machine-config-operator(492d7244-71b2-4f06-bb99-2f4069a8198c)\"" pod="openshift-machine-config-operator/machine-config-daemon-mklmj" podUID="492d7244-71b2-4f06-bb99-2f4069a8198c" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.938550 4672 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330881-mh5hb" event={"ID":"247d5d5b-a8ba-4a5e-91ed-b2eed9eadcd2","Type":"ContainerDied","Data":"99d465188c5e32aea8517dda852997ab33dd495381cb3099797b23e3b4f8d554"} Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.938765 4672 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99d465188c5e32aea8517dda852997ab33dd495381cb3099797b23e3b4f8d554" Oct 07 16:01:05 crc kubenswrapper[4672]: I1007 16:01:05.938657 4672 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29330881-mh5hb" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515071234511024444 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015071234512017362 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015071223614016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015071223614015456 5ustar corecore